Compare commits

...

15 commits

Author SHA1 Message Date
yu-i-i
05d7dac34c Admin panel: improved naming for clarity 2025-07-22 14:19:07 +02:00
Kcho
9596fd3c59 Add List of registered users to admins 2025-07-22 14:19:07 +02:00
Jakob Ackermann
0546fb7233 [third-party-datastore] improve error handling (#26881)
* [third-party-datastore] use generic serializer for dropboxError

The `err` serializer will not pick up all the dropbox fields.

Co-authored-by: Thomas Mees <thomas.mees@overleaf.com>

* [third-party-datastore] handle user_suspended like insufficient_space

Unlink dropbox and display a notification (same key to clear later).

Co-authored-by: Thomas Mees <thomas.mees@overleaf.com>

* [third-party-datastore] skip retries when rejected with disallowed_name

Co-authored-by: Thomas Mees <thomas.mees@overleaf.com>

* [web] sort translations

* [web] update copy for dropbox_unlinked_because_suspended

Co-authored-by: Kamal Arkinstall <kamal.arkinstall@overleaf.com>

---------

Co-authored-by: Thomas Mees <thomas.mees@overleaf.com>
Co-authored-by: Kamal Arkinstall <kamal.arkinstall@overleaf.com>
GitOrigin-RevId: 8fbb9074d1d6eb879e904d79dd4b2a2c952ff902
2025-07-22 08:07:13 +00:00
Jakob Ackermann
b1880ba64d [monorepo] upgrade tough-cookie in request to latest version (#27249)
GitOrigin-RevId: 9096e05d2c337c3d3a9b4ca6efec8fd40c51a622
2025-07-22 08:07:08 +00:00
Jakob Ackermann
082121d3da [web] reject upload requests without a file path (#27156)
* [web] reject upload requests without a file path

* [web] update copy on error message and link to contact form

Co-authored-by: Kamal Arkinstall <kamal.arkinstall@overleaf.com>

* [web] update copy: move dot to the end

---------

Co-authored-by: Kamal Arkinstall <kamal.arkinstall@overleaf.com>
GitOrigin-RevId: ba1ee81a91b046540caeb2f3f3da0e305611b35f
2025-07-22 08:07:03 +00:00
Jakob Ackermann
81f0807fc6 [web] prepare filestore migration for Server Pro/CE (#27230)
* [web] prepare filestore migration for Server Pro/CE

* [history-v1] remove unused USER_FILES_BUCKET_NAME env var from script

* [server-ce] tests: write default docker-compose.override.yml on startup

* [server-ce] tests: extend access logging of host-admin for response

* [server-ce] tests: test text and binary file upload

* [server-ce] tests: add tests for filestore migration

* [web] simplify feature gate for filestore/project-history-blobs logic

Co-authored-by: Brian Gough <brian.gough@overleaf.com>

* [server-ce] test: fix flaky test helper

---------

Co-authored-by: Brian Gough <brian.gough@overleaf.com>
GitOrigin-RevId: f89bdab2749e2b7a49d609e2eac6bf621c727966
2025-07-22 08:06:58 +00:00
Jakob Ackermann
bf43d4f709 [history-v1] make back_fill_file_hash_fix_up compatible with Server Pro (#27280)
* [history-v1] move MockFilestore into shared place

Co-authored-by: Brian Gough <brian.gough@overleaf.com>

* [history-v1] make back_fill_file_hash_fix_up compatible with Server Pro

---------

Co-authored-by: Brian Gough <brian.gough@overleaf.com>
GitOrigin-RevId: 70ea57e1503031d9f14dcd60c4c110e746450587
2025-07-22 08:06:41 +00:00
David
ae3f63d37f Merge pull request #27209 from overleaf/dp-collaborator-colour
Adapt online user and chat user colors based on luminance

GitOrigin-RevId: 1b0c843147ee3dc585866bc491a7c7613cb00e70
2025-07-22 08:06:32 +00:00
Antoine Clausse
30b0cabbbc [web] Update tests to add emails with 6-digits flow (#27076)
* In tests, post to `/user/emails/secondary` (6-digits) instead of the deprecated `/user/emails` (link-token)

* Update `addEmailAndConfirm` so it calls the right endpoint

* Remove unnecessary `userId` from `confirmEmail` and `addEmailAndConfirm` args

* Use `updateUser` to add unconfirmed email to user

* Confirm, then unconfirm emails, in order to test on unconfirmed emails

* Lowercase emails in `unconfirmSecondaryEmail`, so they get matched correctly

* Update UserEmailsTests.mjs with 6-digits flow, fetch, no `npm:async`

GitOrigin-RevId: 71b9ed65daebea5f22272240559caab375515f0c
2025-07-22 08:06:23 +00:00
Tim Down
2f427ef0e0 Merge pull request #27229 from overleaf/td-group-pricing-select
Allow clicks on icon in group plans select lists to open the select

GitOrigin-RevId: d54b27851cb8b5541d71c48ff815d52cf99db16f
2025-07-22 08:06:10 +00:00
Tim Down
0778bab910 Merge pull request #27254 from overleaf/td-project-dashboard-cookie-banner
Implement React cookie banner on project dashboard

GitOrigin-RevId: 95d2778d7ce7cb3054a06b06486b815a3453a623
2025-07-22 08:06:05 +00:00
Domagoj Kriskovic
d5b5710d01 Add docModified hook in ds-mobile-app module (#27196)
* Add docModified hook in ds-mobile-app module

* use Object.entries when iterating over promises

* avoid project lookup

* update tests

GitOrigin-RevId: 88676746f56558a97ce31010b57f5eeb254fefef
2025-07-22 08:05:56 +00:00
Domagoj Kriskovic
868d562d96 Support password-fallbackPassword array in requireBasicAuth (#27237)
GitOrigin-RevId: 33b15a05996bfa0190041f347772867a9667e2ca
2025-07-22 08:05:51 +00:00
Andrew Rumble
5d79cf18c0 Define all initial roles
GitOrigin-RevId: ad613bad4d8a47e327281e90b5475e989a3ccec4
2025-07-22 08:05:42 +00:00
Christopher Hoskin
7ecee2e0aa Merge pull request #27255 from overleaf/revert-27252-revert-26843-csh-issue-26608-mongo8-dev-ci
Revert "Revert "Upgrade the dev environment and CI to mongo 8""

GitOrigin-RevId: 5074b012504e65240017f1fde9b0d8d04c7b8b61
2025-07-22 08:05:25 +00:00
88 changed files with 1590 additions and 544 deletions

31
package-lock.json generated
View file

@@ -35581,6 +35581,7 @@
"resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
"integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==",
"deprecated": "request has been deprecated, see https://github.com/request/request/issues/3142",
"license": "Apache-2.0",
"dependencies": {
"aws-sign2": "~0.7.0",
"aws4": "^1.8.0",
@@ -35638,15 +35639,15 @@
}
},
"node_modules/request/node_modules/tough-cookie": {
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
"integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-5.1.2.tgz",
"integrity": "sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A==",
"license": "BSD-3-Clause",
"dependencies": {
"psl": "^1.1.28",
"punycode": "^2.1.1"
"tldts": "^6.1.32"
},
"engines": {
"node": ">=0.8"
"node": ">=16"
}
},
"node_modules/requestretry": {
@@ -39612,6 +39613,24 @@
"tlds": "bin.js"
}
},
"node_modules/tldts": {
"version": "6.1.86",
"resolved": "https://registry.npmjs.org/tldts/-/tldts-6.1.86.tgz",
"integrity": "sha512-WMi/OQ2axVTf/ykqCQgXiIct+mSQDFdH2fkwhPwgEwvJ1kSzZRiinb0zF2Xb8u4+OqPChmyI6MEu4EezNJz+FQ==",
"license": "MIT",
"dependencies": {
"tldts-core": "^6.1.86"
},
"bin": {
"tldts": "bin/cli.js"
}
},
"node_modules/tldts-core": {
"version": "6.1.86",
"resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-6.1.86.tgz",
"integrity": "sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA==",
"license": "MIT"
},
"node_modules/tmp": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz",

View file

@@ -33,6 +33,9 @@
"path-to-regexp": "3.3.0",
"body-parser": "1.20.3",
"multer": "2.0.1"
},
"request@2.88.2": {
"tough-cookie": "5.1.2"
}
},
"scripts": {

View file

@@ -21,9 +21,11 @@ test-e2e-native:
test-e2e:
docker compose build host-admin
docker compose up -d host-admin
docker compose up --no-log-prefix --exit-code-from=e2e e2e
test-e2e-open:
docker compose up -d host-admin
docker compose up --no-log-prefix --exit-code-from=e2e-open e2e-open
clean:

View file

@@ -35,7 +35,7 @@ services:
MAILTRAP_PASSWORD: 'password-for-mailtrap'
mongo:
image: mongo:6.0
image: mongo:8.0.11
command: '--replSet overleaf'
volumes:
- ../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

View file

@@ -2,6 +2,7 @@ import {
createNewFile,
createProject,
openProjectById,
testNewFileUpload,
} from './helpers/project'
import { isExcludedBySharding, startWith } from './helpers/config'
import { ensureUserExists, login } from './helpers/login'
@@ -119,24 +120,7 @@ describe('editor', () => {
cy.get('button').contains('New file').click({ force: true })
})
it('can upload file', () => {
const name = `${uuid()}.txt`
const content = `Test File Content ${name}`
cy.get('button').contains('Upload').click({ force: true })
cy.get('input[type=file]')
.first()
.selectFile(
{
contents: Cypress.Buffer.from(content),
fileName: name,
lastModified: Date.now(),
},
{ force: true }
)
// force: The file-tree pane is too narrow to display the full name.
cy.findByTestId('file-tree').findByText(name).click({ force: true })
cy.findByText(content)
})
testNewFileUpload()
it('should not display import from URL', () => {
cy.findByText('From external URL').should('not.exist')

View file

@@ -0,0 +1,104 @@
import { ensureUserExists, login } from './helpers/login'
import {
createProject,
openProjectById,
prepareFileUploadTest,
} from './helpers/project'
import { isExcludedBySharding, startWith } from './helpers/config'
import { prepareWaitForNextCompileSlot } from './helpers/compile'
import { beforeWithReRunOnTestRetry } from './helpers/beforeWithReRunOnTestRetry'
import { v4 as uuid } from 'uuid'
import { purgeFilestoreData, runScript } from './helpers/hostAdminClient'
describe('filestore migration', function () {
if (isExcludedBySharding('CE_CUSTOM_3')) return
startWith({ withDataDir: true, resetData: true, vars: {} })
ensureUserExists({ email: 'user@example.com' })
let projectName: string
let projectId: string
let waitForCompileRateLimitCoolOff: (fn: () => void) => void
const previousBinaryFiles: (() => void)[] = []
beforeWithReRunOnTestRetry(function () {
projectName = `project-${uuid()}`
login('user@example.com')
createProject(projectName, { type: 'Example project' }).then(
id => (projectId = id)
)
let queueReset
;({ waitForCompileRateLimitCoolOff, queueReset } =
prepareWaitForNextCompileSlot())
queueReset()
previousBinaryFiles.push(prepareFileUploadTest(true))
})
beforeEach(() => {
login('user@example.com')
waitForCompileRateLimitCoolOff(() => {
openProjectById(projectId)
})
})
function checkFilesAreAccessible() {
it('can upload new binary file and read previous uploads', function () {
previousBinaryFiles.push(prepareFileUploadTest(true))
for (const check of previousBinaryFiles) {
check()
}
})
it('renders frog jpg', () => {
cy.findByTestId('file-tree').findByText('frog.jpg').click()
cy.get('[alt="frog.jpg"]')
.should('be.visible')
.and('have.prop', 'naturalWidth')
.should('be.greaterThan', 0)
})
}
describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL not set', function () {
startWith({ withDataDir: true, vars: {} })
checkFilesAreAccessible()
})
describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL=0', function () {
startWith({
withDataDir: true,
vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '0' },
})
checkFilesAreAccessible()
describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL=1', function () {
startWith({
withDataDir: true,
vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '1' },
})
checkFilesAreAccessible()
describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL=2', function () {
startWith({
withDataDir: true,
vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '1' },
})
before(async function () {
await runScript({
cwd: 'services/history-v1',
script: 'storage/scripts/back_fill_file_hash.mjs',
})
})
startWith({
withDataDir: true,
vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '2' },
})
checkFilesAreAccessible()
describe('purge filestore data', function () {
before(async function () {
await purgeFilestoreData()
})
checkFilesAreAccessible()
})
})
})
})
})

View file

@@ -9,6 +9,7 @@ export function isExcludedBySharding(
| 'CE_DEFAULT'
| 'CE_CUSTOM_1'
| 'CE_CUSTOM_2'
| 'CE_CUSTOM_3'
| 'PRO_DEFAULT_1'
| 'PRO_DEFAULT_2'
| 'PRO_CUSTOM_1'

View file

@@ -85,6 +85,12 @@ export async function getRedisKeys() {
return stdout.split('\n')
}
export async function purgeFilestoreData() {
await fetchJSON(`${hostAdminURL}/data/user_files`, {
method: 'DELETE',
})
}
async function sleep(ms: number) {
return new Promise(resolve => {
setTimeout(resolve, ms)

View file

@@ -216,3 +216,43 @@ export function createNewFile() {
return fileName
}
export function prepareFileUploadTest(binary = false) {
const name = `${uuid()}.txt`
const content = `Test File Content ${name}${binary ? ' \x00' : ''}`
cy.get('button').contains('Upload').click({ force: true })
cy.get('input[type=file]')
.first()
.selectFile(
{
contents: Cypress.Buffer.from(content),
fileName: name,
lastModified: Date.now(),
},
{ force: true }
)
// wait for the upload to finish
cy.findByRole('treeitem', { name })
return function check() {
cy.findByRole('treeitem', { name }).click()
if (binary) {
cy.findByText(content).should('not.have.class', 'cm-line')
} else {
cy.findByText(content).should('have.class', 'cm-line')
}
}
}
export function testNewFileUpload() {
it('can upload text file', () => {
const check = prepareFileUploadTest(false)
check()
})
it('can upload binary file', () => {
const check = prepareFileUploadTest(true)
check()
})
}

View file

@@ -29,6 +29,17 @@ const IMAGES = {
PRO: process.env.IMAGE_TAG_PRO.replace(/:.+/, ''),
}
function defaultDockerComposeOverride() {
return {
services: {
sharelatex: {
environment: {},
},
'git-bridge': {},
},
}
}
let previousConfig = ''
function readDockerComposeOverride() {
@@ -38,14 +49,7 @@ function readDockerComposeOverride() {
if (error.code !== 'ENOENT') {
throw error
}
return {
services: {
sharelatex: {
environment: {},
},
'git-bridge': {},
},
}
return defaultDockerComposeOverride
}
}
@@ -77,12 +81,21 @@ app.use(bodyParser.json())
app.use((req, res, next) => {
// Basic access logs
console.log(req.method, req.url, req.body)
const json = res.json
res.json = body => {
console.log(req.method, req.url, req.body, '->', body)
json.call(res, body)
}
next()
})
app.use((req, res, next) => {
// Add CORS headers
const accessControlAllowOrigin =
process.env.ACCESS_CONTROL_ALLOW_ORIGIN || 'http://sharelatex'
res.setHeader('Access-Control-Allow-Origin', accessControlAllowOrigin)
res.setHeader('Access-Control-Allow-Headers', 'Content-Type')
res.setHeader('Access-Control-Max-Age', '3600')
res.setHeader('Access-Control-Allow-Methods', 'DELETE, GET, HEAD, POST, PUT')
next()
})
@@ -133,6 +146,7 @@ const allowedVars = Joi.object(
'V1_HISTORY_URL',
'SANDBOXED_COMPILES',
'ALL_TEX_LIVE_DOCKER_IMAGE_NAMES',
'OVERLEAF_FILESTORE_MIGRATION_LEVEL',
'OVERLEAF_TEMPLATES_USER_ID',
'OVERLEAF_NEW_PROJECT_TEMPLATE_LINKS',
'OVERLEAF_ALLOW_PUBLIC_ACCESS',
@@ -319,8 +333,19 @@ app.get('/redis/keys', (req, res) => {
)
})
app.delete('/data/user_files', (req, res) => {
runDockerCompose(
'exec',
['sharelatex', 'rm', '-rf', '/var/lib/overleaf/data/user_files'],
(error, stdout, stderr) => {
res.json({ error, stdout, stderr })
}
)
})
app.use(handleValidationErrors())
purgeDataDir()
writeDockerComposeOverride(defaultDockerComposeOverride())
app.listen(80)

View file

@@ -42,7 +42,7 @@ services:
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
user: root
mongo:
image: mongo:7.0.20
image: mongo:8.0.11
command: --replSet overleaf
volumes:
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

View file

@@ -44,7 +44,7 @@ services:
command: npm run --silent test:acceptance
mongo:
image: mongo:7.0.20
image: mongo:8.0.11
command: --replSet overleaf
volumes:
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

View file

@@ -42,7 +42,7 @@ services:
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
user: root
mongo:
image: mongo:7.0.20
image: mongo:8.0.11
command: --replSet overleaf
volumes:
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

View file

@@ -44,7 +44,7 @@ services:
command: npm run --silent test:acceptance
mongo:
image: mongo:7.0.20
image: mongo:8.0.11
command: --replSet overleaf
volumes:
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

View file

@@ -47,7 +47,7 @@ services:
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
user: root
mongo:
image: mongo:7.0.20
image: mongo:8.0.11
command: --replSet overleaf
volumes:
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

View file

@@ -49,7 +49,7 @@ services:
command: npm run --silent test:acceptance
mongo:
image: mongo:7.0.20
image: mongo:8.0.11
command: --replSet overleaf
volumes:
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

View file

@@ -55,7 +55,7 @@ services:
retries: 20
mongo:
image: mongo:7.0.20
image: mongo:8.0.11
command: --replSet overleaf
volumes:
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

View file

@@ -57,7 +57,7 @@ services:
retries: 20
mongo:
image: mongo:7.0.20
image: mongo:8.0.11
command: --replSet overleaf
volumes:
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

View file

@@ -75,7 +75,7 @@ services:
retries: 20
mongo:
image: mongo:7.0.20
image: mongo:8.0.11
command: --replSet overleaf
volumes:
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

View file

@@ -83,7 +83,7 @@ services:
retries: 20
mongo:
image: mongo:7.0.20
image: mongo:8.0.11
command: --replSet overleaf
volumes:
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

View file

@@ -150,10 +150,6 @@ const CONCURRENT_BATCHES = parseInt(process.env.CONCURRENT_BATCHES || '2', 10)
const RETRIES = parseInt(process.env.RETRIES || '10', 10)
const RETRY_DELAY_MS = parseInt(process.env.RETRY_DELAY_MS || '100', 10)
const USER_FILES_BUCKET_NAME = process.env.USER_FILES_BUCKET_NAME || ''
if (!USER_FILES_BUCKET_NAME) {
throw new Error('env var USER_FILES_BUCKET_NAME is missing')
}
const RETRY_FILESTORE_404 = process.env.RETRY_FILESTORE_404 === 'true'
const BUFFER_DIR = fs.mkdtempSync(
process.env.BUFFER_DIR_PREFIX || '/tmp/back_fill_file_hash-'

View file

@@ -9,15 +9,12 @@ import { Blob } from 'overleaf-editor-core'
import {
BlobStore,
getStringLengthOfFile,
GLOBAL_BLOBS,
makeBlobForFile,
} from '../lib/blob_store/index.js'
import { db } from '../lib/mongodb.js'
import commandLineArgs from 'command-line-args'
import readline from 'node:readline'
import { _blobIsBackedUp, backupBlob } from '../lib/backupBlob.mjs'
import { NotFoundError } from '@overleaf/object-persistor/src/Errors.js'
import filestorePersistor from '../lib/persistor.js'
import { setTimeout } from 'node:timers/promises'
// Silence warning.
@@ -52,12 +49,11 @@ ObjectId.cacheHexString = true
*/
/**
* @return {{FIX_NOT_FOUND: boolean, FIX_HASH_MISMATCH: boolean, FIX_DELETE_PERMISSION: boolean, FIX_MISSING_HASH: boolean, LOGS: string}}
* @return {{FIX_NOT_FOUND: boolean, FIX_HASH_MISMATCH: boolean, FIX_MISSING_HASH: boolean, LOGS: string}}
*/
function parseArgs() {
const args = commandLineArgs([
{ name: 'fixNotFound', type: String, defaultValue: 'true' },
{ name: 'fixDeletePermission', type: String, defaultValue: 'true' },
{ name: 'fixHashMismatch', type: String, defaultValue: 'true' },
{ name: 'fixMissingHash', type: String, defaultValue: 'true' },
{ name: 'logs', type: String, defaultValue: '' },
@@ -74,20 +70,13 @@ function parseArgs() {
}
return {
FIX_HASH_MISMATCH: boolVal('fixNotFound'),
FIX_DELETE_PERMISSION: boolVal('fixDeletePermission'),
FIX_NOT_FOUND: boolVal('fixHashMismatch'),
FIX_MISSING_HASH: boolVal('fixMissingHash'),
LOGS: args.logs,
}
}
const {
FIX_HASH_MISMATCH,
FIX_DELETE_PERMISSION,
FIX_NOT_FOUND,
FIX_MISSING_HASH,
LOGS,
} = parseArgs()
const { FIX_HASH_MISMATCH, FIX_NOT_FOUND, FIX_MISSING_HASH, LOGS } = parseArgs()
if (!LOGS) {
throw new Error('--logs parameter missing')
}
@@ -105,6 +94,37 @@ const STREAM_HIGH_WATER_MARK = parseInt(
)
const SLEEP_BEFORE_EXIT = parseInt(process.env.SLEEP_BEFORE_EXIT || '1000', 10)
// Filestore endpoint location
const FILESTORE_HOST = process.env.FILESTORE_HOST || '127.0.0.1'
const FILESTORE_PORT = process.env.FILESTORE_PORT || '3009'
async function fetchFromFilestore(projectId, fileId) {
const url = `http://${FILESTORE_HOST}:${FILESTORE_PORT}/project/${projectId}/file/${fileId}`
const response = await fetch(url)
if (!response.ok) {
if (response.status === 404) {
throw new NotFoundError('file not found in filestore', {
status: response.status,
})
}
const body = await response.text()
throw new OError('fetchFromFilestore failed', {
projectId,
fileId,
status: response.status,
body,
})
}
if (!response.body) {
throw new OError('fetchFromFilestore response has no body', {
projectId,
fileId,
status: response.status,
})
}
return response.body
}
/** @type {ProjectsCollection} */
const projectsCollection = db.collection('projects')
/** @type {DeletedProjectsCollection} */
@@ -302,19 +322,16 @@ async function setHashInMongo(projectId, fileId, hash) {
* @return {Promise<void>}
*/
async function importRestoredFilestoreFile(projectId, fileId, historyId) {
const filestoreKey = `${projectId}/${fileId}`
const path = `${BUFFER_DIR}/${projectId}_${fileId}`
try {
let s
try {
s = await filestorePersistor.getObjectStream(
USER_FILES_BUCKET_NAME,
filestoreKey
)
s = await fetchFromFilestore(projectId, fileId)
} catch (err) {
if (err instanceof NotFoundError) {
throw new OError('missing blob, need to restore filestore file', {
filestoreKey,
projectId,
fileId,
})
}
throw err
@ -325,7 +342,6 @@ async function importRestoredFilestoreFile(projectId, fileId, historyId) {
)
const blobStore = new BlobStore(historyId)
const blob = await blobStore.putFile(path)
await backupBlob(historyId, blob, path)
await setHashInMongo(projectId, fileId, blob.getHash())
} finally {
await fs.promises.rm(path, { force: true })
@ -339,13 +355,9 @@ async function importRestoredFilestoreFile(projectId, fileId, historyId) {
* @return {Promise<Blob>}
*/
async function bufferFilestoreFileToDisk(projectId, fileId, path) {
const filestoreKey = `${projectId}/${fileId}`
try {
await Stream.promises.pipeline(
await filestorePersistor.getObjectStream(
USER_FILES_BUCKET_NAME,
filestoreKey
),
await fetchFromFilestore(projectId, fileId),
fs.createWriteStream(path, { highWaterMark: STREAM_HIGH_WATER_MARK })
)
const blob = await makeBlobForFile(path)
@ -356,7 +368,8 @@ async function bufferFilestoreFileToDisk(projectId, fileId, path) {
} catch (err) {
if (err instanceof NotFoundError) {
throw new OError('missing blob, need to restore filestore file', {
filestoreKey,
projectId,
fileId,
})
}
throw err
@ -389,7 +402,7 @@ async function uploadFilestoreFile(projectId, fileId) {
const blob = await bufferFilestoreFileToDisk(projectId, fileId, path)
const hash = blob.getHash()
try {
await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash)
await ensureBlobExistsForFile(projectId, fileId, hash)
} catch (err) {
if (!(err instanceof Blob.NotFoundError)) throw err
@ -397,7 +410,7 @@ async function uploadFilestoreFile(projectId, fileId) {
const historyId = project.overleaf.history.id.toString()
const blobStore = new BlobStore(historyId)
await blobStore.putBlob(path, blob)
await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash)
await ensureBlobExistsForFile(projectId, fileId, hash)
}
} finally {
await fs.promises.rm(path, { force: true })
@ -426,11 +439,7 @@ async function fixHashMismatch(line) {
await importRestoredFilestoreFile(projectId, fileId, historyId)
return true
}
return await ensureBlobExistsForFileAndUploadToAWS(
projectId,
fileId,
computedHash
)
return await ensureBlobExistsForFile(projectId, fileId, computedHash)
}
/**
@ -444,30 +453,19 @@ async function hashAlreadyUpdatedInFileTree(projectId, fileId, hash) {
return fileRef.hash === hash
}
/**
* @param {string} projectId
* @param {string} hash
* @return {Promise<boolean>}
*/
async function needsBackingUpToAWS(projectId, hash) {
if (GLOBAL_BLOBS.has(hash)) return false
return !(await _blobIsBackedUp(projectId, hash))
}
/**
* @param {string} projectId
* @param {string} fileId
* @param {string} hash
* @return {Promise<boolean>}
*/
async function ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) {
async function ensureBlobExistsForFile(projectId, fileId, hash) {
const { project } = await getProject(projectId)
const historyId = project.overleaf.history.id.toString()
const blobStore = new BlobStore(historyId)
if (
(await hashAlreadyUpdatedInFileTree(projectId, fileId, hash)) &&
(await blobStore.getBlob(hash)) &&
!(await needsBackingUpToAWS(projectId, hash))
(await blobStore.getBlob(hash))
) {
return false // already processed
}
@ -488,7 +486,7 @@ async function ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) {
)
if (writtenBlob.getHash() !== hash) {
// Double check download, better safe than sorry.
throw new OError('blob corrupted', { writtenBlob })
throw new OError('blob corrupted', { writtenBlob, hash })
}
let blob = await blobStore.getBlob(hash)
@ -497,7 +495,6 @@ async function ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) {
// HACK: Skip upload to GCS and finalize putBlob operation directly.
await blobStore.backend.insertBlob(historyId, writtenBlob)
}
await backupBlob(historyId, writtenBlob, path)
} finally {
await fs.promises.rm(path, { force: true })
}
@ -505,16 +502,6 @@ async function ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) {
return true
}
/**
* @param {string} line
* @return {Promise<boolean>}
*/
async function fixDeletePermission(line) {
let { projectId, fileId, hash } = JSON.parse(line)
if (!hash) hash = await computeFilestoreFileHash(projectId, fileId)
return await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash)
}
/**
* @param {string} line
* @return {Promise<boolean>}
@ -526,7 +513,7 @@ async function fixMissingHash(line) {
} = await findFile(projectId, fileId)
if (hash) {
// processed, double check
return await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash)
return await ensureBlobExistsForFile(projectId, fileId, hash)
}
await uploadFilestoreFile(projectId, fileId)
return true
@ -543,11 +530,6 @@ const CASES = {
flag: FIX_HASH_MISMATCH,
action: fixHashMismatch,
},
'delete permission': {
match: 'storage.objects.delete',
flag: FIX_DELETE_PERMISSION,
action: fixDeletePermission,
},
'missing file hash': {
match: '"bad file hash"',
flag: FIX_MISSING_HASH,

View file

@@ -20,7 +20,7 @@ import {
makeProjectKey,
} from '../../../../storage/lib/blob_store/index.js'
import express from 'express'
import { mockFilestore } from './support/MockFilestore.mjs'
chai.use(chaiExclude)
const TIMEOUT = 20 * 1_000
@ -28,59 +28,6 @@ const TIMEOUT = 20 * 1_000
const projectsCollection = db.collection('projects')
const deletedProjectsCollection = db.collection('deletedProjects')
class MockFilestore {
constructor() {
this.host = process.env.FILESTORE_HOST || '127.0.0.1'
this.port = process.env.FILESTORE_PORT || 3009
// create a server listening on this.host and this.port
this.files = {}
this.app = express()
this.app.get('/project/:projectId/file/:fileId', (req, res) => {
const { projectId, fileId } = req.params
const content = this.files[projectId]?.[fileId]
if (!content) return res.status(404).end()
res.status(200).end(content)
})
}
start() {
// reset stored files
this.files = {}
// start the server
if (this.serverPromise) {
return this.serverPromise
} else {
this.serverPromise = new Promise((resolve, reject) => {
this.server = this.app.listen(this.port, this.host, err => {
if (err) return reject(err)
resolve()
})
})
return this.serverPromise
}
}
addFile(projectId, fileId, fileContent) {
if (!this.files[projectId]) {
this.files[projectId] = {}
}
this.files[projectId][fileId] = fileContent
}
deleteObject(projectId, fileId) {
if (this.files[projectId]) {
delete this.files[projectId][fileId]
if (Object.keys(this.files[projectId]).length === 0) {
delete this.files[projectId]
}
}
}
}
const mockFilestore = new MockFilestore()
/**
* @param {ObjectId} objectId
* @return {string}

View file

@@ -1,48 +1,24 @@
import fs from 'node:fs'
import Crypto from 'node:crypto'
import Stream from 'node:stream'
import { promisify } from 'node:util'
import { Binary, ObjectId } from 'mongodb'
import { Blob } from 'overleaf-editor-core'
import { backedUpBlobs, blobs, db } from '../../../../storage/lib/mongodb.js'
import { db } from '../../../../storage/lib/mongodb.js'
import cleanup from './support/cleanup.js'
import testProjects from '../api/support/test_projects.js'
import { execFile } from 'node:child_process'
import chai, { expect } from 'chai'
import chaiExclude from 'chai-exclude'
import config from 'config'
import { WritableBuffer } from '@overleaf/stream-utils'
import {
backupPersistor,
projectBlobsBucket,
} from '../../../../storage/lib/backupPersistor.mjs'
import projectKey from '../../../../storage/lib/project_key.js'
import {
BlobStore,
makeProjectKey,
} from '../../../../storage/lib/blob_store/index.js'
import ObjectPersistor from '@overleaf/object-persistor'
import { BlobStore } from '../../../../storage/lib/blob_store/index.js'
import { mockFilestore } from './support/MockFilestore.mjs'
chai.use(chaiExclude)
const TIMEOUT = 20 * 1_000
const { deksBucket } = config.get('backupStore')
const { tieringStorageClass } = config.get('backupPersistor')
const projectsCollection = db.collection('projects')
const deletedProjectsCollection = db.collection('deletedProjects')
const FILESTORE_PERSISTOR = ObjectPersistor({
backend: 'gcs',
gcs: {
endpoint: {
apiEndpoint: process.env.GCS_API_ENDPOINT,
projectId: process.env.GCS_PROJECT_ID,
},
},
})
/**
* @param {ObjectId} objectId
* @return {string}
@ -70,17 +46,6 @@ function binaryForGitBlobHash(gitBlobHash) {
return new Binary(Buffer.from(gitBlobHash, 'hex'))
}
async function listS3Bucket(bucket, wantStorageClass) {
const client = backupPersistor._getClientForBucket(bucket)
const response = await client.listObjectsV2({ Bucket: bucket }).promise()
for (const object of response.Contents || []) {
expect(object).to.have.property('StorageClass', wantStorageClass)
}
return (response.Contents || []).map(item => item.Key || '')
}
function objectIdFromTime(timestamp) {
return ObjectId.createFromTime(new Date(timestamp).getTime() / 1000)
}
@ -97,7 +62,6 @@ describe('back_fill_file_hash_fix_up script', function () {
const historyIdDeleted0 = projectIdDeleted0.toString()
const fileIdWithDifferentHashFound = objectIdFromTime('2017-02-01T00:00:00Z')
const fileIdInGoodState = objectIdFromTime('2017-02-01T00:01:00Z')
const fileIdBlobExistsInGCS0 = objectIdFromTime('2017-02-01T00:02:00Z')
const fileIdWithDifferentHashNotFound0 = objectIdFromTime(
'2017-02-01T00:03:00Z'
)
@ -112,9 +76,6 @@ describe('back_fill_file_hash_fix_up script', function () {
const fileIdWithDifferentHashRestore = objectIdFromTime(
'2017-02-01T00:08:00Z'
)
const fileIdBlobExistsInGCS1 = objectIdFromTime('2017-02-01T00:09:00Z')
const fileIdRestoreFromFilestore0 = objectIdFromTime('2017-02-01T00:10:00Z')
const fileIdRestoreFromFilestore1 = objectIdFromTime('2017-02-01T00:11:00Z')
const fileIdMissing2 = objectIdFromTime('2017-02-01T00:12:00Z')
const fileIdHashMissing0 = objectIdFromTime('2017-02-01T00:13:00Z')
const fileIdHashMissing1 = objectIdFromTime('2017-02-01T00:14:00Z')
@ -125,31 +86,11 @@ describe('back_fill_file_hash_fix_up script', function () {
)
const deleteProjectsRecordId0 = new ObjectId()
const writtenBlobs = [
{
projectId: projectId0,
historyId: historyId0,
fileId: fileIdBlobExistsInGCS0,
},
{
projectId: projectId0,
historyId: historyId0,
fileId: fileIdBlobExistsInGCS1,
},
{
projectId: projectId0,
historyId: historyId0,
fileId: fileIdWithDifferentHashNotFound0,
},
{
projectId: projectId0,
historyId: historyId0,
fileId: fileIdRestoreFromFilestore0,
},
{
projectId: projectId0,
historyId: historyId0,
fileId: fileIdRestoreFromFilestore1,
},
{
projectId: projectId0,
historyId: historyId0,
@ -200,17 +141,6 @@ describe('back_fill_file_hash_fix_up script', function () {
},
msg: 'failed to process file',
},
{
projectId: projectId0,
fileId: fileIdRestoreFromFilestore0,
err: { message: 'OError: hash mismatch' },
hash: gitBlobHash(fileIdRestoreFromFilestore0),
entry: {
ctx: { historyId: historyId0.toString() },
hash: hashDoesNotExistAsBlob,
},
msg: 'failed to process file',
},
{
projectId: projectIdDeleted0,
fileId: fileIdWithDifferentHashNotFound1,
@ -236,33 +166,6 @@ describe('back_fill_file_hash_fix_up script', function () {
err: { message: 'NotFoundError' },
msg: 'failed to process file',
},
{
projectId: projectId0,
fileId: fileIdBlobExistsInGCS0,
hash: gitBlobHash(fileIdBlobExistsInGCS0),
err: { message: 'storage.objects.delete' },
msg: 'failed to process file',
},
{
projectId: projectId0,
fileId: fileIdBlobExistsInGCSCorrupted,
hash: gitBlobHash(fileIdBlobExistsInGCSCorrupted),
err: { message: 'storage.objects.delete' },
msg: 'failed to process file',
},
{
projectId: projectId0,
fileId: fileIdBlobExistsInGCS1,
hash: gitBlobHash(fileIdBlobExistsInGCS1),
err: { message: 'storage.objects.delete' },
msg: 'failed to process file',
},
{
projectId: projectId0,
fileId: fileIdRestoreFromFilestore1,
err: { message: 'storage.objects.delete' },
msg: 'failed to process file',
},
{
projectId: projectIdDeleted0,
fileId: fileIdMissing1,
@ -291,22 +194,23 @@ describe('back_fill_file_hash_fix_up script', function () {
reason: 'bad file hash',
msg: 'bad file-tree path',
},
{
projectId: projectId0,
_id: fileIdBlobExistsInGCSCorrupted,
reason: 'bad file hash',
msg: 'bad file-tree path',
},
]
if (PRINT_IDS_AND_HASHES_FOR_DEBUGGING) {
const fileIds = {
fileIdWithDifferentHashFound,
fileIdInGoodState,
fileIdBlobExistsInGCS0,
fileIdBlobExistsInGCS1,
fileIdWithDifferentHashNotFound0,
fileIdWithDifferentHashNotFound1,
fileIdBlobExistsInGCSCorrupted,
fileIdMissing0,
fileIdMissing1,
fileIdMissing2,
fileIdWithDifferentHashRestore,
fileIdRestoreFromFilestore0,
fileIdRestoreFromFilestore1,
fileIdHashMissing0,
fileIdHashMissing1,
}
@ -330,38 +234,25 @@ describe('back_fill_file_hash_fix_up script', function () {
before(cleanup.everything)
before('populate blobs/GCS', async function () {
await FILESTORE_PERSISTOR.sendStream(
USER_FILES_BUCKET_NAME,
`${projectId0}/${fileIdRestoreFromFilestore0}`,
Stream.Readable.from([fileIdRestoreFromFilestore0.toString()])
await mockFilestore.start()
mockFilestore.addFile(
projectId0,
fileIdHashMissing0,
fileIdHashMissing0.toString()
)
await FILESTORE_PERSISTOR.sendStream(
USER_FILES_BUCKET_NAME,
`${projectId0}/${fileIdRestoreFromFilestore1}`,
Stream.Readable.from([fileIdRestoreFromFilestore1.toString()])
mockFilestore.addFile(
projectId0,
fileIdHashMissing1,
fileIdHashMissing1.toString()
)
await FILESTORE_PERSISTOR.sendStream(
USER_FILES_BUCKET_NAME,
`${projectId0}/${fileIdHashMissing0}`,
Stream.Readable.from([fileIdHashMissing0.toString()])
)
await FILESTORE_PERSISTOR.sendStream(
USER_FILES_BUCKET_NAME,
`${projectId0}/${fileIdHashMissing1}`,
Stream.Readable.from([fileIdHashMissing1.toString()])
mockFilestore.addFile(
projectId0,
fileIdBlobExistsInGCSCorrupted,
fileIdBlobExistsInGCSCorrupted.toString()
)
await new BlobStore(historyId0.toString()).putString(
fileIdHashMissing1.toString() // partially processed
)
await new BlobStore(historyId0.toString()).putString(
fileIdBlobExistsInGCS0.toString()
)
await new BlobStore(historyId0.toString()).putString(
fileIdBlobExistsInGCS1.toString()
)
await new BlobStore(historyId0.toString()).putString(
fileIdRestoreFromFilestore1.toString()
)
const path = '/tmp/test-blob-corrupted'
try {
await fs.promises.writeFile(path, contentCorruptedBlob)
@ -426,22 +317,10 @@ describe('back_fill_file_hash_fix_up script', function () {
_id: fileIdWithDifferentHashNotFound0,
hash: hashDoesNotExistAsBlob,
},
{
_id: fileIdRestoreFromFilestore0,
hash: hashDoesNotExistAsBlob,
},
{
_id: fileIdRestoreFromFilestore1,
},
{
_id: fileIdBlobExistsInGCS0,
hash: gitBlobHash(fileIdBlobExistsInGCS0),
},
{
_id: fileIdBlobExistsInGCSCorrupted,
hash: gitBlobHash(fileIdBlobExistsInGCSCorrupted),
},
{ _id: fileIdBlobExistsInGCS1 },
],
folders: [],
},
@ -546,8 +425,8 @@ describe('back_fill_file_hash_fix_up script', function () {
})
it('should print stats', function () {
expect(stats).to.contain({
processedLines: 16,
success: 11,
processedLines: 12,
success: 7,
alreadyProcessed: 0,
fileDeleted: 0,
skipped: 0,
@ -558,9 +437,9 @@ describe('back_fill_file_hash_fix_up script', function () {
it('should handle re-run on same logs', async function () {
;({ stats } = await runScriptWithLogs())
expect(stats).to.contain({
processedLines: 16,
processedLines: 12,
success: 0,
alreadyProcessed: 8,
alreadyProcessed: 4,
fileDeleted: 3,
skipped: 0,
failed: 3,
@ -663,31 +542,11 @@ describe('back_fill_file_hash_fix_up script', function () {
_id: fileIdWithDifferentHashNotFound0,
hash: gitBlobHash(fileIdWithDifferentHashNotFound0),
},
// Updated hash
{
_id: fileIdRestoreFromFilestore0,
hash: gitBlobHash(fileIdRestoreFromFilestore0),
},
// Added hash
{
_id: fileIdRestoreFromFilestore1,
hash: gitBlobHash(fileIdRestoreFromFilestore1),
},
// No change, blob created
{
_id: fileIdBlobExistsInGCS0,
hash: gitBlobHash(fileIdBlobExistsInGCS0),
},
// No change, flagged
{
_id: fileIdBlobExistsInGCSCorrupted,
hash: gitBlobHash(fileIdBlobExistsInGCSCorrupted),
},
// Added hash
{
_id: fileIdBlobExistsInGCS1,
hash: gitBlobHash(fileIdBlobExistsInGCS1),
},
],
folders: [],
},
@ -696,7 +555,7 @@ describe('back_fill_file_hash_fix_up script', function () {
],
overleaf: { history: { id: historyId0 } },
// Incremented when removing file/updating hash
version: 8,
version: 5,
},
])
expect(await deletedProjectsCollection.find({}).toArray()).to.deep.equal([
@ -745,62 +604,6 @@ describe('back_fill_file_hash_fix_up script', function () {
(writtenBlobsByProject.get(projectId) || []).concat([fileId])
)
}
expect(
(await backedUpBlobs.find({}, { sort: { _id: 1 } }).toArray()).map(
entry => {
// blobs are pushed unordered into mongo. Sort the list for consistency.
entry.blobs.sort()
return entry
}
)
).to.deep.equal(
Array.from(writtenBlobsByProject.entries()).map(
([projectId, fileIds]) => {
return {
_id: projectId,
blobs: fileIds
.map(fileId => binaryForGitBlobHash(gitBlobHash(fileId)))
.sort(),
}
}
)
)
})
it('should have backed up all the files', async function () {
expect(tieringStorageClass).to.exist
const objects = await listS3Bucket(projectBlobsBucket, tieringStorageClass)
expect(objects.sort()).to.deep.equal(
writtenBlobs
.map(({ historyId, fileId, hash }) =>
makeProjectKey(historyId, hash || gitBlobHash(fileId))
)
.sort()
)
for (let { historyId, fileId } of writtenBlobs) {
const hash = gitBlobHash(fileId.toString())
const s = await backupPersistor.getObjectStream(
projectBlobsBucket,
makeProjectKey(historyId, hash),
{ autoGunzip: true }
)
const buf = new WritableBuffer()
await Stream.promises.pipeline(s, buf)
expect(gitBlobHashBuffer(buf.getContents())).to.equal(hash)
const id = buf.getContents().toString('utf-8')
expect(id).to.equal(fileId.toString())
// double check we are not comparing 'undefined' or '[object Object]' above
expect(id).to.match(/^[a-f0-9]{24}$/)
}
const deks = await listS3Bucket(deksBucket, 'STANDARD')
expect(deks.sort()).to.deep.equal(
Array.from(
new Set(
writtenBlobs.map(
({ historyId }) => projectKey.format(historyId) + '/dek'
)
)
).sort()
)
})
it('should have written the back filled files to history v1', async function () {
for (const { historyId, fileId } of writtenBlobs) {

View file

@ -0,0 +1,54 @@
import express from 'express'
/**
 * Minimal in-memory stand-in for the filestore service, used by
 * acceptance tests. Serves files via
 * GET /project/:projectId/file/:fileId on FILESTORE_HOST:FILESTORE_PORT.
 */
class MockFilestore {
  constructor() {
    this.host = process.env.FILESTORE_HOST || '127.0.0.1'
    this.port = process.env.FILESTORE_PORT || 3009
    // In-memory store: projectId -> { fileId -> content }
    this.files = {}
    this.app = express()
    this.app.get('/project/:projectId/file/:fileId', (req, res) => {
      const { projectId, fileId } = req.params
      const content = this.files[projectId]?.[fileId]
      // Explicit undefined check: an empty-string file is a real file and
      // must be served as 200, not treated as missing (the previous
      // truthiness check would have returned 404 for it).
      if (content === undefined) return res.status(404).end()
      res.status(200).end(content)
    })
  }

  /**
   * Start the HTTP server. Idempotent: concurrent/repeat calls share one
   * listen promise. Always clears previously stored files first.
   */
  start() {
    // reset stored files
    this.files = {}
    // start the server (only once)
    if (this.serverPromise) {
      return this.serverPromise
    }
    this.serverPromise = new Promise((resolve, reject) => {
      this.server = this.app.listen(this.port, this.host, err => {
        if (err) return reject(err)
        resolve()
      })
    })
    return this.serverPromise
  }

  /** Register (or overwrite) a file's content for a project. */
  addFile(projectId, fileId, fileContent) {
    if (!this.files[projectId]) {
      this.files[projectId] = {}
    }
    this.files[projectId][fileId] = fileContent
  }

  /** Remove a file; drops the project entry when it becomes empty. */
  deleteObject(projectId, fileId) {
    if (this.files[projectId]) {
      delete this.files[projectId][fileId]
      if (Object.keys(this.files[projectId]).length === 0) {
        delete this.files[projectId]
      }
    }
  }
}
export const mockFilestore = new MockFilestore()

View file

@ -42,7 +42,7 @@ services:
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
user: root
mongo:
image: mongo:7.0.20
image: mongo:8.0.11
command: --replSet overleaf
volumes:
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

View file

@ -44,7 +44,7 @@ services:
command: npm run --silent test:acceptance
mongo:
image: mongo:7.0.20
image: mongo:8.0.11
command: --replSet overleaf
volumes:
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

View file

@ -55,7 +55,7 @@ services:
retries: 20
mongo:
image: mongo:7.0.20
image: mongo:8.0.11
command: --replSet overleaf
volumes:
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

View file

@ -57,7 +57,7 @@ services:
retries: 20
mongo:
image: mongo:7.0.20
image: mongo:8.0.11
command: --replSet overleaf
volumes:
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

View file

@ -56,14 +56,8 @@ if (Settings.catchErrors) {
// Create ./data/dumpFolder if needed
FileWriter.ensureDumpFolderExists()
if (
!Features.hasFeature('project-history-blobs') &&
!Features.hasFeature('filestore')
) {
throw new Error(
'invalid config: must enable either project-history-blobs (Settings.enableProjectHistoryBlobs=true) or enable filestore (Settings.disableFilestore=false)'
)
}
// Validate combination of feature flags.
Features.validateSettings()
// handle SIGTERM for graceful shutdown in kubernetes
process.on('SIGTERM', function (signal) {

View file

@ -36,7 +36,22 @@ function send401WithChallenge(res) {
function checkCredentials(userDetailsMap, user, password) {
const expectedPassword = userDetailsMap.get(user)
const userExists = userDetailsMap.has(user) && expectedPassword // user exists with a non-null password
const isValid = userExists && tsscmp(expectedPassword, password)
let isValid = false
if (userExists) {
if (Array.isArray(expectedPassword)) {
const isValidPrimary = Boolean(
expectedPassword[0] && tsscmp(expectedPassword[0], password)
)
const isValidFallback = Boolean(
expectedPassword[1] && tsscmp(expectedPassword[1], password)
)
isValid = isValidPrimary || isValidFallback
} else {
isValid = tsscmp(expectedPassword, password)
}
}
if (!isValid) {
logger.err({ user }, 'invalid login details')
}

View file

@ -7,6 +7,7 @@ import logger from '@overleaf/logger'
import _ from 'lodash'
import { plainTextResponse } from '../../infrastructure/Response.js'
import { expressify } from '@overleaf/promise-utils'
import Modules from '../../infrastructure/Modules.js'
async function getDocument(req, res) {
const { Project_id: projectId, doc_id: docId } = req.params
@ -92,6 +93,9 @@ async function setDocument(req, res) {
{ docId, projectId },
'finished receiving set document request from api (docupdater)'
)
await Modules.promises.hooks.fire('docModified', projectId, docId)
res.json(result)
}

View file

@ -8,7 +8,7 @@ function projectHistoryURLWithFilestoreFallback(
) {
const filestoreURL = `${Settings.apis.filestore.url}/project/${projectId}/file/${fileRef._id}?from=${origin}`
// TODO: When this file is converted to ES modules we will be able to use Features.hasFeature('project-history-blobs'). Currently we can't stub the feature return value in tests.
if (fileRef.hash && Settings.enableProjectHistoryBlobs) {
if (fileRef.hash && Settings.filestoreMigrationLevel >= 1) {
return {
url: `${Settings.apis.project_history.url}/project/${historyId}/blob/${fileRef.hash}`,
fallbackURL: filestoreURL,

View file

@ -66,7 +66,7 @@ function uploadProject(req, res, next) {
async function uploadFile(req, res, next) {
const timer = new metrics.Timer('file-upload')
const name = req.body.name
const path = req.file?.path
const { path } = req.file
const projectId = req.params.Project_id
const userId = SessionManager.getLoggedInUserId(req.session)
let { folder_id: folderId } = req.query
@ -162,8 +162,14 @@ function multerMiddleware(req, res, next) {
.status(422)
.json({ success: false, error: req.i18n.translate('file_too_large') })
}
return next(err)
if (err) return next(err)
if (!req.file?.path) {
logger.info({ req }, 'missing req.file.path on upload')
return res
.status(400)
.json({ success: false, error: 'invalid_upload_request' })
}
next()
})
}

View file

@ -506,6 +506,12 @@ async function expireDeletedUsersAfterDuration(req, res, next) {
res.sendStatus(204)
}
// Admin endpoint: respond with the full list of registered users as JSON.
async function listAllUsers(req, res, next) {
  const allUsers = await UserGetter.promises.getAllUsers()
  res.json(allUsers)
}
module.exports = {
clearSessions: expressify(clearSessions),
changePassword: expressify(changePassword),
@ -518,4 +524,5 @@ module.exports = {
expireDeletedUsersAfterDuration: expressify(expireDeletedUsersAfterDuration),
ensureAffiliationMiddleware: expressify(ensureAffiliationMiddleware),
ensureAffiliation,
listAllUsers: expressify(listAllUsers),
}

View file

@ -150,6 +150,44 @@ async function getWritefullData(userId) {
}
}
// Sum the on-disk size of every file in every project OWNED by the given
// user (shared projects are excluded).
// Returns { count: <number of owned projects>, total: <bytes> }.
// Declared with `const` — the original assigned to an undeclared
// identifier, which creates an implicit global and throws a
// ReferenceError under strict mode / ES modules.
const getTotalProjectStorageForUser = async function (userId) {
  const ProjectEntityHandler = require('../Project/ProjectEntityHandler')
  const { Project } = require('../../models/Project')
  const fs = require('fs')
  const path = require('path')

  let totalSize = 0
  // only owned projects, not shared
  const ownedProjects = await Project.find({ owner_ref: userId }, '_id').exec()
  for (const project of ownedProjects) {
    const files = await ProjectEntityHandler.promises.getAllFiles(project._id)
    for (const file of Object.values(files)) {
      // Filestore keeps user files on disk as "<projectId>_<fileId>"
      // under the user_files store directory.
      const diskPath = path.join(
        settings.filestore.stores.user_files,
        project._id.toString() + '_' + file._id.toString()
      )
      // NOTE(review): throws if the file is missing on disk — presumably
      // stores and the DB are in sync here; confirm before hardening.
      const fstat = await fs.promises.stat(diskPath)
      totalSize += fstat.size
    }
  }
  return { count: ownedProjects.length, total: totalSize } // bytes
}
// Render a byte count as a human-readable string with binary-scaled
// units (e.g. 1536 -> "1.50 KB"); caps at TB.
function formatBytes(bytes) {
  const units = ['B', 'KB', 'MB', 'GB', 'TB']
  let value = bytes
  let unitIndex = 0
  for (; value >= 1024 && unitIndex < units.length - 1; unitIndex++) {
    value /= 1024
  }
  return `${value.toFixed(2)} ${units[unitIndex]}`
}
const UserGetter = {
getSsoUsersAtInstitution: callbackify(getSsoUsersAtInstitution),
@ -286,6 +324,43 @@ const UserGetter = {
})
},
getWritefullData: callbackify(getWritefullData),
getAllUsers(callback) {
const projection = {
_id: 1,
email: 1,
first_name: 1,
last_name: 1,
lastLoggedIn: 1,
signUpDate: 1,
loginCount: 1,
isAdmin: 1,
suspended: 1,
institution: 1,
}
const query = { $or: [{ 'emails.email': { $exists: true } },], }
db.users.find(query, {projection: projection}).toArray(async (err, users) => {
if (err) {
console.error('Error fetching users:', err)
return callback(err)
}
for (let i = 0; i < users.length; i++) {
const user = users[i]
user.signUpDateformatted = moment(user.signUpDate).format('DD/MM/YYYY')
user.lastLoggedInformatted = moment(user.lastLoggedIn).format('DD/MM/YYYY')
const ProjectsInfo = await getTotalProjectStorageForUser(user._id)
user.projectsSize = ProjectsInfo.total
user.projectsSizeFormatted = formatBytes(ProjectsInfo.total)
user.projectsCount = ProjectsInfo.count
}
callback(null, users)
})
}
}
const decorateFullEmails = (

View file

@ -19,8 +19,7 @@ const trackChangesModuleAvailable =
* @property {boolean | undefined} enableGithubSync
* @property {boolean | undefined} enableGitBridge
* @property {boolean | undefined} enableHomepage
* @property {boolean | undefined} enableProjectHistoryBlobs
* @property {boolean | undefined} disableFilestore
* @property {number} filestoreMigrationLevel
* @property {boolean | undefined} enableSaml
* @property {boolean | undefined} ldap
* @property {boolean | undefined} oauth
@ -30,6 +29,14 @@ const trackChangesModuleAvailable =
*/
const Features = {
validateSettings() {
if (![0, 1, 2].includes(Settings.filestoreMigrationLevel)) {
throw new Error(
`invalid OVERLEAF_FILESTORE_MIGRATION_LEVEL=${Settings.filestoreMigrationLevel}, expected 0, 1 or 2`
)
}
},
/**
* @returns {boolean}
*/
@ -89,9 +96,9 @@ const Features = {
Settings.enabledLinkedFileTypes.includes('url')
)
case 'project-history-blobs':
return Boolean(Settings.enableProjectHistoryBlobs)
return Settings.filestoreMigrationLevel > 0
case 'filestore':
return Boolean(Settings.disableFilestore) === false
return Settings.filestoreMigrationLevel < 2
case 'support':
return supportModuleAvailable
case 'symbol-palette':

View file

@ -150,8 +150,7 @@ async function linkedFileAgentsIncludes() {
async function attachHooks() {
for (const module of await modules()) {
const { promises, ...hooks } = module.hooks || {}
for (const hook in promises || {}) {
const method = promises[hook]
for (const [hook, method] of Object.entries(promises || {})) {
attachHook(hook, method)
}
for (const hook in hooks || {}) {

View file

@ -1,13 +1,13 @@
section.cookie-banner.hidden-print.hidden(aria-label='Cookie banner')
.cookie-banner-content We only use cookies for essential purposes and to improve your experience on our site. You can find out more in our <a href="/legal#Cookies">cookie policy</a>.
section.cookie-banner.hidden-print.hidden(aria-label=translate('cookie_banner'))
.cookie-banner-content !{translate('cookie_banner_info', {}, [{ name: 'a', attrs: { href: '/legal#Cookies' }}])}
.cookie-banner-actions
button(
type='button'
class='btn btn-link btn-sm'
data-ol-cookie-banner-set-consent='essential'
) Essential cookies only
) #{translate('essential_cookies_only')}
button(
type='button'
class='btn btn-primary btn-sm'
data-ol-cookie-banner-set-consent='all'
) Accept all cookies
) #{translate('accept_all_cookies')}

View file

@ -4,7 +4,7 @@ block vars
- var suppressNavbar = true
- var suppressFooter = true
- var suppressSkipToContent = true
- var suppressCookieBanner = true
- var suppressPugCookieBanner = true
block content
.content.content-alt

View file

@ -24,7 +24,7 @@ block body
else
include layout/fat-footer
if typeof suppressCookieBanner == 'undefined'
if typeof suppressPugCookieBanner == 'undefined'
include _cookie_banner
if bootstrapVersion === 5

View file

@ -69,5 +69,5 @@ block body
else
include layout/fat-footer-react-bootstrap-5
if typeof suppressCookieBanner === 'undefined'
if typeof suppressPugCookieBanner === 'undefined'
include _cookie_banner

View file

@ -27,7 +27,7 @@ block body
else
include layout/fat-footer-website-redesign
if typeof suppressCookieBanner == 'undefined'
if typeof suppressPugCookieBanner == 'undefined'
include _cookie_banner
block contactModal

View file

@ -66,6 +66,7 @@ nav.navbar.navbar-default.navbar-main.navbar-expand-lg(
if canDisplayAdminMenu
+dropdown-menu-link-item(href='/admin') Manage Site
+dropdown-menu-link-item(href='/admin/user') Manage Users
+dropdown-menu-link-item(href='/admin/users') #{translate('admin_panel')}
+dropdown-menu-link-item(href='/admin/project') Project URL Lookup
if canDisplayAdminRedirect
+dropdown-menu-link-item(href=settings.adminUrl) Switch to Admin

View file

@ -70,6 +70,8 @@ nav.navbar.navbar-default.navbar-main(
a(href='/admin') Manage Site
li
a(href='/admin/user') Manage Users
li
a(href='/admin/users') #{translate('admin_panel')}
li
a(href='/admin/project') Project URL Lookup
if canDisplayAdminRedirect

View file

@ -65,6 +65,8 @@ nav.navbar.navbar-default.navbar-main.website-redesign-navbar
a(href='/admin') Manage Site
li
a(href='/admin/user') Manage Users
li
a(href='/admin/users') #{translate('admin_panel')}
li
a(href='/admin/project') Project URL Lookup
if canDisplayAdminRedirect

View file

@ -2,7 +2,7 @@ extends ../../layout-marketing
block vars
- var suppressFooter = true
- var suppressCookieBanner = true
- var suppressPugCookieBanner = true
- var suppressSkipToContent = true
block content

View file

@ -7,7 +7,7 @@ block vars
- var suppressNavbar = true
- var suppressFooter = true
- var suppressSkipToContent = true
- var suppressCookieBanner = true
- var suppressPugCookieBanner = true
- metadata.robotsNoindexNofollow = true
block content

View file

@ -7,6 +7,7 @@ block vars
- const suppressNavContentLinks = true
- const suppressNavbar = true
- const suppressFooter = true
- const suppressPugCookieBanner = true
block append meta
meta(

View file

@ -5,7 +5,7 @@ block entrypointVar
block vars
- var suppressFooter = true
- var suppressCookieBanner = true
- var suppressPugCookieBanner = true
- var suppressSkipToContent = true
block append meta

View file

@ -5,7 +5,7 @@ block entrypointVar
block vars
- var suppressFooter = true
- var suppressCookieBanner = true
- var suppressPugCookieBanner = true
- var suppressSkipToContent = true
block append meta

View file

@ -440,6 +440,9 @@ module.exports = {
','
),
filestoreMigrationLevel:
parseInt(process.env.OVERLEAF_FILESTORE_MIGRATION_LEVEL, 10) || 0,
// i18n
// ------
//

View file

@ -95,7 +95,7 @@ services:
image: redis:7.4.3
mongo:
image: mongo:7.0.20
image: mongo:8.0.11
logging:
driver: none
command: --replSet overleaf

View file

@ -91,7 +91,7 @@ services:
image: redis:7.4.3
mongo:
image: mongo:7.0.20
image: mongo:8.0.11
command: --replSet overleaf
volumes:
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

View file

@ -35,6 +35,7 @@
"about_to_remove_user_preamble": "",
"about_to_trash_projects": "",
"abstract": "",
"accept_all_cookies": "",
"accept_and_continue": "",
"accept_change": "",
"accept_change_error_description": "",
@ -332,6 +333,8 @@
"continue_to": "",
"continue_using_free_features": "",
"continue_with_free_plan": "",
"cookie_banner": "",
"cookie_banner_info": "",
"copied": "",
"copy": "",
"copy_code": "",
@ -544,6 +547,7 @@
"error_opening_document_detail": "",
"error_performing_request": "",
"error_processing_file": "",
"essential_cookies_only": "",
"example_project": "",
"existing_plan_active_until_term_end": "",
"expand": "",
@ -863,6 +867,7 @@
"invalid_password_too_similar": "",
"invalid_regular_expression": "",
"invalid_request": "",
"invalid_upload_request": "",
"invite": "",
"invite_expired": "",
"invite_more_collabs": "",

View file

@ -1,53 +0,0 @@
import getMeta from '@/utils/meta'
function loadGA() {
if (window.olLoadGA) {
window.olLoadGA()
}
}
function setConsent(value) {
document.querySelector('.cookie-banner').classList.add('hidden')
const cookieDomain = getMeta('ol-ExposedSettings').cookieDomain
const oneYearInSeconds = 60 * 60 * 24 * 365
const cookieAttributes =
'; path=/' +
'; domain=' +
cookieDomain +
'; max-age=' +
oneYearInSeconds +
'; SameSite=Lax; Secure'
if (value === 'all') {
document.cookie = 'oa=1' + cookieAttributes
loadGA()
window.dispatchEvent(new CustomEvent('cookie-consent', { detail: true }))
} else {
document.cookie = 'oa=0' + cookieAttributes
window.dispatchEvent(new CustomEvent('cookie-consent', { detail: false }))
}
}
if (
getMeta('ol-ExposedSettings').gaToken ||
getMeta('ol-ExposedSettings').gaTokenV4 ||
getMeta('ol-ExposedSettings').propensityId ||
getMeta('ol-ExposedSettings').hotjarId
) {
document
.querySelectorAll('[data-ol-cookie-banner-set-consent]')
.forEach(el => {
el.addEventListener('click', function (e) {
e.preventDefault()
const consentType = el.getAttribute('data-ol-cookie-banner-set-consent')
setConsent(consentType)
})
})
const oaCookie = document.cookie.split('; ').find(c => c.startsWith('oa='))
if (!oaCookie) {
const cookieBannerEl = document.querySelector('.cookie-banner')
if (cookieBannerEl) {
cookieBannerEl.classList.remove('hidden')
}
}
}

View file

@ -0,0 +1,32 @@
import {
CookieConsentValue,
cookieBannerRequired,
hasMadeCookieChoice,
setConsent,
} from '@/features/cookie-banner/utils'
// Show or hide the banner element; no-op when it is absent from the page.
function toggleCookieBanner(hidden: boolean) {
  document.querySelector('.cookie-banner')?.classList.toggle('hidden', hidden)
}
if (cookieBannerRequired()) {
  // Wire up each consent button ("essential" / "all") to record the
  // choice and dismiss the banner.
  document
    .querySelectorAll('[data-ol-cookie-banner-set-consent]')
    .forEach(el => {
      el.addEventListener('click', e => {
        e.preventDefault()
        toggleCookieBanner(true)
        setConsent(
          el.getAttribute(
            'data-ol-cookie-banner-set-consent'
          ) as CookieConsentValue | null
        )
      })
    })

  // No recorded choice yet: reveal the banner.
  if (!hasMadeCookieChoice()) {
    toggleCookieBanner(false)
  }
}

View file

@ -0,0 +1,43 @@
import getMeta from '@/utils/meta'
export type CookieConsentValue = 'all' | 'essential'
// Invoke the globally-injected Google Analytics loader, when present.
function loadGA() {
  window.olLoadGA?.()
}
// Persist the consent choice in the "oa" cookie for one year, then notify
// listeners via a "cookie-consent" event; GA is loaded only on full consent.
export function setConsent(value: CookieConsentValue | null) {
  const { cookieDomain } = getMeta('ol-ExposedSettings')
  const oneYearInSeconds = 60 * 60 * 24 * 365
  const cookieAttributes = `; path=/; domain=${cookieDomain}; max-age=${oneYearInSeconds}; SameSite=Lax; Secure`
  const accepted = value === 'all'
  document.cookie = (accepted ? 'oa=1' : 'oa=0') + cookieAttributes
  if (accepted) {
    loadGA()
  }
  window.dispatchEvent(new CustomEvent('cookie-consent', { detail: accepted }))
}
// The banner is only needed when at least one analytics integration is
// configured for this instance.
export function cookieBannerRequired() {
  const { gaToken, gaTokenV4, propensityId, hotjarId } =
    getMeta('ol-ExposedSettings')
  return Boolean(gaToken || gaTokenV4 || propensityId || hotjarId)
}
// An "oa" cookie (either value) records a previous accept/reject choice.
export function hasMadeCookieChoice() {
  const oaCookie = document.cookie.split('; ').find(c => c.startsWith('oa='))
  return oaCookie !== undefined
}

View file

@ -1,4 +1,4 @@
import { useTranslation } from 'react-i18next'
import { useTranslation, Trans } from 'react-i18next'
import { FetchError } from '../../../../infrastructure/fetch-json'
import RedirectToLogin from './redirect-to-login'
import {
@ -7,6 +7,7 @@ import {
InvalidFilenameError,
} from '../../errors'
import DangerMessage from './danger-message'
import getMeta from '@/utils/meta'
// TODO: Update the error type when we properly type FileTreeActionableContext
export default function ErrorMessage({
@ -15,6 +16,7 @@ export default function ErrorMessage({
error: string | Record<string, any>
}) {
const { t } = useTranslation()
const { isOverleaf } = getMeta('ol-ExposedSettings')
const fileNameLimit = 150
// the error is a string
@ -46,6 +48,22 @@ export default function ErrorMessage({
</DangerMessage>
)
case 'invalid_upload_request':
if (!isOverleaf) {
return (
<DangerMessage>{t('generic_something_went_wrong')}</DangerMessage>
)
}
return (
<DangerMessage>
<Trans
i18nKey="invalid_upload_request"
// eslint-disable-next-line jsx-a11y/anchor-has-content, react/jsx-key
components={[<a href="/contact" target="_blank" />]}
/>
</DangerMessage>
)
case 'duplicate_file_name':
return (
<DangerMessage>

View file

@ -1,15 +1,14 @@
import { MessageProps } from '@/features/chat/components/message'
import { User } from '../../../../../../types/user'
import { getHueForUserId } from '@/shared/utils/colors'
import {
getBackgroundColorForUserId,
hslStringToLuminance,
} from '@/shared/utils/colors'
import MessageContent from '@/features/chat/components/message-content'
import classNames from 'classnames'
import MaterialIcon from '@/shared/components/material-icon'
import { t } from 'i18next'
function hue(user?: User) {
return user ? getHueForUserId(user.id) : 0
}
function getAvatarStyle(user?: User) {
if (!user?.id) {
// Deleted user
@ -20,9 +19,15 @@ function getAvatarStyle(user?: User) {
}
}
const backgroundColor = getBackgroundColorForUserId(user.id)
return {
borderColor: `hsl(${hue(user)}, 85%, 40%)`,
backgroundColor: `hsl(${hue(user)}, 85%, 40%`,
borderColor: backgroundColor,
backgroundColor,
color:
hslStringToLuminance(backgroundColor) < 0.5
? 'var(--content-primary-dark)'
: 'var(--content-primary)',
}
}

View file

@ -7,7 +7,11 @@ import {
DropdownToggle,
} from '@/features/ui/components/bootstrap-5/dropdown-menu'
import OLTooltip from '@/features/ui/components/ol/ol-tooltip'
import { getBackgroundColorForUserId } from '@/shared/utils/colors'
import {
getBackgroundColorForUserId,
hslStringToLuminance,
} from '@/shared/utils/colors'
import classNames from 'classnames'
import { useCallback, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
@ -86,9 +90,16 @@ const OnlineUserWidget = ({
const OnlineUserCircle = ({ user }: { user: OnlineUser }) => {
const backgroundColor = getBackgroundColorForUserId(user.user_id)
const luminance = hslStringToLuminance(backgroundColor)
const [character] = [...user.name]
return (
<span className="online-user-circle" style={{ backgroundColor }}>
<span
className={classNames('online-user-circle', {
'online-user-circle-light-font': luminance < 0.5,
'online-user-circle-dark-font': luminance >= 0.5,
})}
style={{ backgroundColor }}
>
{character}
</span>
)

View file

@ -20,6 +20,7 @@ import Footer from '@/features/ui/components/bootstrap-5/footer/footer'
import SidebarDsNav from '@/features/project-list/components/sidebar/sidebar-ds-nav'
import SystemMessages from '@/shared/components/system-messages'
import overleafLogo from '@/shared/svgs/overleaf-a-ds-solution-mallard.svg'
import CookieBanner from '@/shared/components/cookie-banner'
export function ProjectListDsNav() {
const navbarProps = getMeta('ol-navbar')
@ -125,6 +126,7 @@ export function ProjectListDsNav() {
</div>
<Footer {...footerProps} />
</div>
<CookieBanner />
</div>
</main>
</div>

View file

@ -18,6 +18,7 @@ import Footer from '@/features/ui/components/bootstrap-5/footer/footer'
import WelcomePageContent from '@/features/project-list/components/welcome-page-content'
import { ProjectListDsNav } from '@/features/project-list/components/project-list-ds-nav'
import { DsNavStyleProvider } from '@/features/project-list/components/use-is-ds-nav'
import CookieBanner from '@/shared/components/cookie-banner'
function ProjectListRoot() {
const { isReady } = useWaitForI18n()
@ -88,9 +89,12 @@ function ProjectListPageContent() {
if (totalProjectsCount === 0) {
return (
<DefaultPageContentWrapper>
<WelcomePageContent />
</DefaultPageContentWrapper>
<>
<DefaultPageContentWrapper>
<WelcomePageContent />
</DefaultPageContentWrapper>
<CookieBanner />
</>
)
}
return (

View file

@ -39,6 +39,9 @@ export default function AdminMenu({
<NavDropdownLinkItem href="/admin/user">
Manage Users
</NavDropdownLinkItem>
<NavDropdownLinkItem href="/admin/users">
Admin Panel
</NavDropdownLinkItem>
<NavDropdownLinkItem href="/admin/project">
Project URL lookup
</NavDropdownLinkItem>

View file

@ -0,0 +1,58 @@
import OLButton from '@/features/ui/components/ol/ol-button'
import { Trans, useTranslation } from 'react-i18next'
import React, { useState } from 'react'
import {
CookieConsentValue,
cookieBannerRequired,
hasMadeCookieChoice,
setConsent,
} from '@/features/cookie-banner/utils'
// Cookie-consent banner (React flavour of the pug/vanilla-JS banner).
// Renders nothing when consent is not required or already recorded.
function CookieBanner() {
  const { t } = useTranslation()
  // Start hidden when the banner is not required for this instance, or
  // when the user has already made a choice.
  const [hidden, setHidden] = useState(
    () => !cookieBannerRequired() || hasMadeCookieChoice()
  )

  // Persist the choice and dismiss the banner.
  const makeCookieChoice = (value: CookieConsentValue) => {
    setConsent(value)
    setHidden(true)
  }

  if (hidden) {
    return null
  }

  return (
    <section
      className="cookie-banner hidden-print"
      aria-label={t('cookie_banner')}
    >
      <div className="cookie-banner-content">
        <Trans
          i18nKey="cookie_banner_info"
          // eslint-disable-next-line react/jsx-key, jsx-a11y/anchor-has-content
          components={[<a href="/legal#Cookies" />]}
        />
      </div>
      <div className="cookie-banner-actions">
        <OLButton
          variant="link"
          size="sm"
          onClick={() => makeCookieChoice('essential')}
        >
          {t('essential_cookies_only')}
        </OLButton>
        <OLButton
          variant="primary"
          size="sm"
          onClick={() => makeCookieChoice('all')}
        >
          {t('accept_all_cookies')}
        </OLButton>
      </div>
    </section>
  )
}

export default CookieBanner

View file

@ -34,6 +34,51 @@ export function getBackgroundColorForUserId(userId?: string) {
return `hsl(${getHueForUserId(userId)}, 70%, 50%)`
}
/**
 * Compute the relative luminance (0..1) of a color given as an
 * "hsl(H, S%, L%)" string, e.g. the output of getBackgroundColorForUserId.
 *
 * Hues outside [0, 360) fall through to black (luminance 0), matching the
 * original branch structure.
 */
export function hslStringToLuminance(hslString: string): number {
  // Pull the raw components out of "hsl(H, S%, L%)"
  const [rawHue, rawSaturation, rawLightness] = hslString
    .slice(4)
    .split(')')[0]
    .split(',')
  const hue = Number(rawHue)
  const saturation = Number(rawSaturation.slice(0, -1)) / 100
  const lightness = Number(rawLightness.slice(0, -1)) / 100

  // HSL -> RGB (standard CSS conversion): chroma, the secondary component,
  // and the per-channel lightness offset.
  const chroma = (1 - Math.abs(2 * lightness - 1)) * saturation
  const secondary = chroma * (1 - Math.abs(((hue / 60) % 2) - 1))
  const offset = lightness - chroma / 2

  // Each 60-degree hue sector assigns (chroma, secondary, 0) to a different
  // permutation of the (r, g, b) channels.
  let red = 0
  let green = 0
  let blue = 0
  if (hue >= 0 && hue < 360) {
    const sectorChannels = [
      [chroma, secondary, 0],
      [secondary, chroma, 0],
      [0, chroma, secondary],
      [0, secondary, chroma],
      [secondary, 0, chroma],
      [chroma, 0, secondary],
    ][Math.floor(hue / 60)]
    red = sectorChannels[0] + offset
    green = sectorChannels[1] + offset
    blue = sectorChannels[2] + offset
  }
  // Relative luminance using the BT.709 channel weights
  return 0.2126 * red + 0.7152 * green + 0.0722 * blue
}
const cachedHues = new Map()
export function getHueForId(id: string) {

View file

@ -124,4 +124,12 @@
box-sizing: border-box;
display: inline-block;
}
.online-user-circle-light-font {
color: var(--content-primary-dark);
}
.online-user-circle-dark-font {
color: var(--content-primary);
}
}

View file

@ -524,6 +524,10 @@ $z-index-group-member-picker-list: 1;
&[data-ol-plans-new-group-member-picker-button='group-all'] {
height: $group-member-picker-top-height;
}
.material-symbols {
pointer-events: none;
}
}
ul.plans-new-group-member-picker-list {

View file

@ -255,6 +255,12 @@
display: flex;
flex-direction: column;
> * {
@include media-breakpoint-up(md) {
border-left: 1px solid var(--border-divider);
}
}
.project-ds-nav-content {
flex-grow: 1;
overflow-y: auto;
@ -263,10 +269,20 @@
@include media-breakpoint-up(md) {
border-top-left-radius: var(--border-radius-large);
border-left: 1px solid var(--border-divider);
border-top: 1px solid var(--border-divider);
}
}
.cookie-banner {
position: static;
background-color: var(--bg-light-primary);
// Remove the parts of the shadow that stick out of the sides
clip-path: inset(-13px 0 0 0);
// Prevent the cookie banner being overlaid on top of the navigation
z-index: auto;
}
}
}

View file

@ -38,6 +38,7 @@
"about_to_trash_projects": "You are about to trash the following projects:",
"abstract": "Abstract",
"accept": "Accept",
"accept_all_cookies": "Accept all cookies",
"accept_and_continue": "Accept and continue",
"accept_change": "Accept change",
"accept_change_error_description": "There was an error accepting a track change. Please try again in a few moments.",
@ -433,6 +434,8 @@
"continue_using_free_features": "Continue using our free features",
"continue_with_free_plan": "Continue with free plan",
"continue_with_service": "Continue with __service__",
"cookie_banner": "Cookie banner",
"cookie_banner_info": "We only use cookies for essential purposes and to improve your experience on our site. You can find out more in our <0>cookie policy</0>.",
"copied": "Copied",
"copy": "Copy",
"copy_code": "Copy code",
@ -612,6 +615,7 @@
"dropbox_synced": "Overleaf and Dropbox have processed all updates. Note that your local Dropbox might still be synchronizing",
"dropbox_unlinked_because_access_denied": "Your Dropbox account has been unlinked because the Dropbox service rejected your stored credentials. Please relink your Dropbox account to continue using it with Overleaf.",
"dropbox_unlinked_because_full": "Your Dropbox account has been unlinked because it is full, and we can no longer send updates to it. Please free up some space and relink your Dropbox account to continue using it with Overleaf.",
"dropbox_unlinked_because_suspended": "We’ve unlinked your Dropbox account because it’s been suspended by Dropbox. You’ll be able to relink once you’ve resolved the issue with Dropbox.",
"dropbox_unlinked_premium_feature": "<0>Your Dropbox account has been unlinked</0> because Dropbox Sync is a premium feature that you had through an institutional license.",
"due_date": "Due __date__",
"due_today": "Due today",
@ -700,6 +704,7 @@
"error_performing_request": "An error has occurred while performing your request.",
"error_processing_file": "Sorry, something went wrong processing this file. Please try again.",
"es": "Spanish",
"essential_cookies_only": "Essential cookies only",
"estimated_number_of_overleaf_users": "Estimated number of __appName__ users",
"every": "per",
"everything_in_free_plus": "Everything in Free, plus…",
@ -1107,6 +1112,7 @@
"invalid_password_too_similar": "Password is too similar to parts of email address",
"invalid_regular_expression": "Invalid regular expression",
"invalid_request": "Invalid Request. Please correct the data and try again.",
"invalid_upload_request": "The upload failed. If the problem persists, <0>let us know</0>.",
"invalid_zip_file": "Invalid zip file",
"invite": "Invite",
"invite_expired": "The invite may have expired",

View file

@ -8,7 +8,6 @@ import _ from 'lodash'
import ProjectGetter from '../../../../../app/src/Features/Project/ProjectGetter.js'
import User from '../../../../../test/acceptance/src/helpers/User.mjs'
import MockDocUpdaterApiClass from '../../../../../test/acceptance/src/mocks/MockDocUpdaterApi.mjs'
import Features from '../../../../../app/src/infrastructure/Features.js'
const { ObjectId } = mongodb
@ -188,32 +187,25 @@ describe('ProjectStructureChanges', function () {
const cases = [
{
label: 'with filestore disabled and project-history-blobs enabled',
disableFilestore: true,
enableProjectHistoryBlobs: true,
filestoreMigrationLevel: 2,
},
{
label: 'with filestore enabled and project-history-blobs enabled',
disableFilestore: false,
enableProjectHistoryBlobs: true,
filestoreMigrationLevel: 1,
},
{
label: 'with filestore enabled and project-history-blobs disabled',
disableFilestore: false,
enableProjectHistoryBlobs: false,
filestoreMigrationLevel: 0,
},
]
for (const { label, disableFilestore, enableProjectHistoryBlobs } of cases) {
for (const { label, filestoreMigrationLevel } of cases) {
describe(label, function () {
const previousDisableFilestore = Settings.disableFilestore
const previousEnableProjectHistoryBlobs =
Settings.enableProjectHistoryBlobs
const previousFilestoreMigrationLevel = Settings.filestoreMigrationLevel
beforeEach(function () {
Settings.disableFilestore = disableFilestore
Settings.enableProjectHistoryBlobs = enableProjectHistoryBlobs
Settings.filestoreMigrationLevel = filestoreMigrationLevel
})
afterEach(function () {
Settings.disableFilestore = previousDisableFilestore
Settings.enableProjectHistoryBlobs = previousEnableProjectHistoryBlobs
Settings.filestoreMigrationLevel = previousFilestoreMigrationLevel
})
describe('creating a project from the example template', function () {
@ -244,7 +236,7 @@ describe('ProjectStructureChanges', function () {
expect(updates[2].type).to.equal('add-file')
expect(updates[2].userId).to.equal(owner._id)
expect(updates[2].pathname).to.equal('/frog.jpg')
if (disableFilestore) {
if (filestoreMigrationLevel === 2) {
expect(updates[2].url).to.not.exist
expect(updates[2].createdBlob).to.be.true
} else {
@ -301,10 +293,10 @@ describe('ProjectStructureChanges', function () {
expect(updates[2].type).to.equal('add-file')
expect(updates[2].userId).to.equal(owner._id)
expect(updates[2].pathname).to.equal('/frog.jpg')
if (disableFilestore) {
if (filestoreMigrationLevel === 2) {
expect(updates[2].url).to.not.exist
expect(updates[2].createdBlob).to.be.true
} else if (Features.hasFeature('project-history-blobs')) {
} else if (filestoreMigrationLevel === 1) {
expect(updates[2].url).to.be.null
} else {
expect(updates[2].url).to.be.a('string')
@ -378,7 +370,7 @@ describe('ProjectStructureChanges', function () {
expect(updates[1].type).to.equal('add-file')
expect(updates[1].userId).to.equal(owner._id)
expect(updates[1].pathname).to.equal('/1pixel.png')
if (disableFilestore) {
if (filestoreMigrationLevel === 2) {
expect(updates[1].url).to.not.exist
expect(updates[1].createdBlob).to.be.true
} else {
@ -478,7 +470,7 @@ describe('ProjectStructureChanges', function () {
expect(update.type).to.equal('add-file')
expect(update.userId).to.equal(owner._id)
expect(update.pathname).to.equal('/1pixel.png')
if (disableFilestore) {
if (filestoreMigrationLevel === 2) {
expect(update.url).to.not.exist
expect(update.createdBlob).to.be.true
} else {
@ -516,7 +508,7 @@ describe('ProjectStructureChanges', function () {
expect(updates[1].type).to.equal('add-file')
expect(updates[1].userId).to.equal(owner._id)
expect(updates[1].pathname).to.equal('/1pixel.png')
if (disableFilestore) {
if (filestoreMigrationLevel === 2) {
expect(updates[1].url).to.not.exist
expect(updates[1].createdBlob).to.be.true
} else {
@ -1005,7 +997,7 @@ describe('ProjectStructureChanges', function () {
expect(update.type).to.equal('add-file')
expect(update.userId).to.equal(owner._id)
expect(update.pathname).to.equal('/1pixel.png')
if (disableFilestore) {
if (filestoreMigrationLevel === 2) {
expect(update.url).to.not.exist
expect(update.createdBlob).to.be.true
} else {
@ -1068,7 +1060,7 @@ describe('ProjectStructureChanges', function () {
expect(updates[1].type).to.equal('add-file')
expect(updates[1].userId).to.equal(owner._id)
expect(updates[1].pathname).to.equal('/1pixel.png')
if (disableFilestore) {
if (filestoreMigrationLevel === 2) {
expect(updates[1].url).to.not.exist
expect(updates[1].createdBlob).to.be.true
} else {

View file

@ -62,8 +62,88 @@ async function activateAccountPage(req, res, next) {
})
}
//
// Admin-only page listing every registered user. The view also receives the
// requesting admin's id (so their own row is rendered without action buttons)
// and a CSRF token for the suspend/unsuspend forms.
async function listAllUsers(req, res, next) {
  const users = await UserGetter.promises.getAllUsers()
  const viewPath = Path.resolve(__dirname, '../views/user/list')
  res.render(viewPath, {
    title: 'Users list',
    users,
    currentUserId: req.user._id,
    _csrf: req.csrfToken(),
  })
}
import UserUpdater from '../../../../app/src/Features/User/UserUpdater.js'
/*
 * Suspend a user account from the admin panel.
 * It is a modified copy of /overleaf/services/web/scripts/suspend_users.mjs.
 * Best-effort: a failure is only logged and the admin is redirected back to
 * the user list either way.
 * NOTE(review): initiatorId is set to the *target* user's id rather than the
 * acting admin's (req.user._id) — confirm this is intentional.
 * @param {request} req
 * @param {response} res
 */
async function suspendUser(req, res) {
  const userId = req.params.userId
  try {
    await UserUpdater.promises.suspendUser(userId, {
      initiatorId: userId,
      ip: req.ip,
      info: { script: false },
    })
  } catch (error) {
    // Deliberately swallowed so the redirect still happens; the failure is
    // only visible in the server log.
    console.log(`Failed to suspend ${userId}`, error)
  }
  res.redirect('/admin/users')
}
/*
 * Lift a suspension from the admin panel.
 * It is a modified copy of UserUpdater.suspendUser.
 * @param {request} req
 * @param {response} res
 */
async function unsuspendUser(req, res) {
  // Only match accounts that are currently suspended, so the update is a
  // no-op for unknown ids and already-active users.
  const result = await UserUpdater.promises.updateUser(
    { _id: req.params.userId, suspended: { $ne: false } },
    { $set: { suspended: false } }
  )
  if (result.matchedCount !== 1) {
    // Nothing matched: log it and fall through to the redirect anyway.
    console.log('user id not found or already unsuspended')
  }
  res.redirect('/admin/users')
}
/*
 * Update a user's first and last name from the admin panel.
 * It is a modified copy of UserUpdater.suspendUser.
 * @param {request} req.body.userId
 * @param {request} req.body.first_name
 * @param {request} req.body.last_name
 * @param {response} res
 */
async function updateUser(req, res) {
  const { userId, first_name: firstName, last_name: lastName } = req.body
  const result = await UserUpdater.promises.updateUser(
    { _id: userId },
    // Keep the snake_case keys: they are the persisted field names.
    { $set: { first_name: firstName, last_name: lastName } }
  )
  if (result.matchedCount !== 1) {
    console.log(`user id not found ${userId}`)
    // Previously this branch sent no response at all, leaving the client's
    // fetch() hanging until timeout. Report the failure explicitly instead.
    return res.status(404).json({ success: false })
  }
  res.json({ success: true })
}
export default {
registerNewUser,
register: expressify(register),
activateAccountPage: expressify(activateAccountPage),
listAllUsers: expressify(listAllUsers),
suspendUser: expressify(suspendUser),
unsuspendUser: expressify(unsuspendUser),
updateUser: expressify(updateUser),
}

View file

@ -26,5 +26,21 @@ export default {
AuthorizationMiddleware.ensureUserIsSiteAdmin,
UserActivateController.register
)
webRouter.get('/admin/users',
AuthorizationMiddleware.ensureUserIsSiteAdmin,
UserActivateController.listAllUsers
)
webRouter.post('/admin/users/:userId/suspend',
AuthorizationMiddleware.ensureUserIsSiteAdmin,
UserActivateController.suspendUser
)
webRouter.post('/admin/users/:userId/unsuspend',
AuthorizationMiddleware.ensureUserIsSiteAdmin,
UserActivateController.unsuspendUser
)
webRouter.post('/admin/users/settings',
AuthorizationMiddleware.ensureUserIsSiteAdmin,
UserActivateController.updateUser
)
},
}

View file

@ -0,0 +1,258 @@
extends ../../../../../app/views/layout-react
block append meta
meta(name="ol-usersBestSubscription" data-type="json" content=usersBestSubscription)
meta(name="ol-notifications" data-type="json" content=notifications)
meta(name="ol-notificationsInstitution" data-type="json" content=notificationsInstitution)
meta(name="ol-userEmails" data-type="json" content=userEmails)
meta(name="ol-allInReconfirmNotificationPeriods" data-type="json" content=allInReconfirmNotificationPeriods)
meta(name="ol-user" data-type="json" content=user)
meta(name="ol-userAffiliations" data-type="json" content=userAffiliations)
meta(name="ol-reconfirmedViaSAML" content=reconfirmedViaSAML)
meta(name="ol-survey" data-type="json" content=survey)
meta(name="ol-tags" data-type="json" content=tags)
meta(name="ol-portalTemplates" data-type="json" content=portalTemplates)
meta(name="ol-prefetchedProjectsBlob" data-type="json" content=prefetchedProjectsBlob)
if (suggestedLanguageSubdomainConfig)
meta(name="ol-suggestedLanguage" data-type="json" content=Object.assign(suggestedLanguageSubdomainConfig, {
lngName: translate(suggestedLanguageSubdomainConfig.lngCode),
imgUrl: buildImgPath("flags/24/" + suggestedLanguageSubdomainConfig.lngCode + ".png")
}))
meta(name="ol-currentUrl" data-type="string" content=currentUrl)
meta(name="ol-showGroupsAndEnterpriseBanner" data-type="boolean" content=showGroupsAndEnterpriseBanner)
meta(name="ol-groupsAndEnterpriseBannerVariant" data-type="string" content=groupsAndEnterpriseBannerVariant)
meta(name="ol-showInrGeoBanner" data-type="boolean" content=showInrGeoBanner)
meta(name="ol-showBrlGeoBanner" data-type="boolean" content=showBrlGeoBanner)
meta(name="ol-recommendedCurrency" data-type="string" content=recommendedCurrency)
meta(name="ol-showLATAMBanner" data-type="boolean" content=showLATAMBanner)
meta(name="ol-groupSubscriptionsPendingEnrollment" data-type="json" content=groupSubscriptionsPendingEnrollment)
meta(name="ol-hasIndividualRecurlySubscription" data-type="boolean" content=hasIndividualRecurlySubscription)
meta(name="ol-groupSsoSetupSuccess" data-type="boolean" content=groupSsoSetupSuccess)
meta(name="ol-showUSGovBanner" data-type="boolean" content=showUSGovBanner)
meta(name="ol-usGovBannerVariant" data-type="string" content=usGovBannerVariant)
block css
style.
.edit-icon {
display: none;
}
.user-name:hover .edit-icon {
display: inline-block;
}
.edit-save-icon, .edit-cancel-icon {
margin-left: 5px;
}
.form-control:focus {
box-shadow: 0 0 0 2px rgba(0, 123, 255, 0.25); /* Bootstrap-style focus */
}
/* Ensure consistent width during editing */
.user-name {
display: block;
width: 100%;
min-width: 200px;
}
/* Fixed width for name column */
table td:nth-child(3),
table th:nth-child(3) {
width: 200px;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
block append meta
link(rel='stylesheet', href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.10.5/font/bootstrap-icons.css", id="main-stylesheet")
block content
.content.content-alt#main-content
.container
.row
.col-sm-12
.card
.card-body
.page-header
h1 #{translate('admin_panel')}
if users.length
table.table.table-striped
thead
tr
th #{translate('email')}
th #{translate('name')}
th #{translate('institution')}
th #{translate('sign_up')} #{translate('date')}
th #{translate('login_count')}
th #{translate('last_logged_in')}
th #{translate('projects_count')}
th #{translate('file_size')}
th #{translate('role')}
th #{translate('status')}
tbody
each user in users
tr
td #{user.email}
td
div.user-name(
data-user-id=user._id
data-original-name=(user.first_name || '') + ', ' + (user.last_name || '')
style="width: 200px; min-width: 200px;"
)
span.name-text #{user.first_name || ''}, #{user.last_name || ''}
i.bi.bi-pencil.edit-icon(
title=translate('edit'),
style="cursor: pointer; margin-left: 5px;"
data-user-id=user._id
)
td #{user.institution || ''}
td #{user.signUpDateformatted ? user.signUpDateformatted: ''}
td #{user.loginCount || ''}
td #{user.lastLoggedInformatted ? user.lastLoggedInformatted: ''}
td
span.me-2 #{user.projectsCount || 0}
td #{user.projectsSizeFormatted || 0}
td #{user.isAdmin ? translate('admin') : ''}
td
span.badge(class=(user.suspended ? 'bg-secondary' : 'bg-success'))
#{user.suspended ? translate('suspended') : translate('active')}
if user._id.toString() !== currentUserId
form.d-inline(method="POST", action=`/admin/users/${user._id}/${user.suspended ? 'unsuspend' : 'suspend'}`)
input(type="hidden", name="_csrf", value=csrfToken)
button.btn.btn-sm(
class=(user.suspended ? 'btn-danger' : 'btn-success'),
type="submit",
data-bs-toggle="tooltip",
title=(user.suspended ? translate('activate') : translate('suspend')),
) #{user.suspended ? translate('suspended') : translate('active')}
else
p There are no registered users.
block head-scripts
script(type="text/javascript", nonce=scriptNonce).
// Enable editing functionality for user names
function enableEdit(icon) {
// Prevent editing multiple rows
if (currentlyEditingRow != null) { return; }
const container = icon.parentElement;
const userId = icon.getAttribute("data-user-id");
const originalName = container.getAttribute("data-original-name");
[firstName, lastName] = originalName.split(",").map(str => str.trim());
if (lastName === undefined) { lastName = ''; }
// Save reference to current row being edited
currentlyEditingRow = container;
// Replace content with editable inputs and buttons
container.innerHTML = `
<input type="text" class="form-control form-control-sm d-inline-block w-auto me-1" value="${firstName}" data-first-name>
<input type="text" class="form-control form-control-sm d-inline-block w-auto me-1" value="${lastName}" data-last-name>
<i class="bi bi-save edit-save-icon" title="#{translate('save')}" style="cursor: pointer;" data-user-id="${userId}"></i>
<i class="bi bi-x-circle edit-cancel-icon" title="#{translate('cancel')}" style="cursor: pointer; margin-left: 5px;" data-user-id="${userId}"></i>
`;
// Add Enter key listener to input fields
const firstInput = container.querySelector('[data-first-name]');
const lastInput = container.querySelector('[data-last-name]');
// Function to trigger save on Enter or discard on Escape
const handleKeys = (e) => {
if (e.key === 'Enter') {
e.preventDefault(); // Prevent form submission
saveChanges(container, userId);
}
if (e.key === 'Escape') {
e.preventDefault(); // Prevent form submission
const container = currentlyEditingRow;
const userId = container.getAttribute("data-user-id");
discardChanges(container.firstChild, userId);
}
};
// Attach event listener to both input fields
firstInput.addEventListener('keydown', handleKeys);
lastInput.addEventListener('keydown', handleKeys);
firstInput.focus();
}
script(type="text/javascript", nonce=scriptNonce).
  // Save the edited user name via the admin settings endpoint.
  // `element` may be the save icon (click path) *or* the .user-name
  // container itself (the Enter-key handler in enableEdit passes the
  // container), so resolve the container with closest() rather than
  // assuming parentElement.
  function saveChanges(element, userId) {
    const container = element.closest('.user-name');
    const firstName = container.querySelector('[data-first-name]').value;
    const lastName = container.querySelector('[data-last-name]').value;
    const csrfToken = document.querySelector('input[name="_csrf"]').value;
    fetch('/admin/users/settings', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Accept: 'application/json',
      },
      body: JSON.stringify({
        userId: userId,
        first_name: firstName,
        last_name: lastName,
        _csrf: csrfToken
      }),
      credentials: 'same-origin'
    })
      .then(response => {
        if (!response.ok) throw new Error('Request failed');
        return response.json();
      })
      .then(data => {
        // Restore the read-only view on the container itself (the original
        // code wrote into the clicked icon, corrupting the row markup) and
        // record the new name as the baseline for future edits.
        container.innerHTML = `
          <span class="name-text">${firstName}, ${lastName}</span>
          <i class="bi bi-pencil edit-icon" style="cursor: pointer; margin-left: 5px;" data-user-id="${userId}"></i>
        `;
        container.setAttribute("data-original-name", `${firstName}, ${lastName}`);
        // Clear editing reference
        currentlyEditingRow = null;
      })
      .catch(error => {
        console.error('Error:', error);
      });
  }
script(type="text/javascript", nonce=scriptNonce).
function discardChanges(icon, userId) {
const container = icon.parentElement;
const originalName = container.getAttribute("data-original-name");
// Restore original view
container.innerHTML = `
<span class="name-text">${originalName}</span>
<i class="bi bi-pencil edit-icon" style="cursor: pointer; margin-left: 5px;" data-user-id="${userId}"></i>
`;
// Clear editing reference
currentlyEditingRow = null;
}
script(type="text/javascript", nonce=scriptNonce).
document.addEventListener("DOMContentLoaded", function() {
currentlyEditingRow = null;
document.addEventListener("click", (event) => {
if (event.target.classList.contains("edit-icon")) {
enableEdit(event.target);
} else if (event.target.classList.contains("edit-save-icon")) {
const userId = event.target.getAttribute("data-user-id");
saveChanges(event.target, userId);
} else if (event.target.classList.contains("edit-cancel-icon")) {
const userId = event.target.getAttribute("data-user-id");
discardChanges(event.target, userId);
}
});
})
script(type="text/javascript", nonce=scriptNonce).
document.querySelectorAll(".edit-icon").forEach(icon => {
icon.addEventListener("click", () => enableEdit(icon));
})

View file

@ -74,6 +74,7 @@ async function main() {
'dropbox_email_not_verified',
'dropbox_unlinked_because_access_denied',
'dropbox_unlinked_because_full',
'dropbox_unlinked_because_suspended',
// Actually used without the spurious space.
// TODO: fix the space and upload the changed locales

View file

@ -69,10 +69,7 @@ describe('PrimaryEmailCheck', function () {
$set: { lastPrimaryEmailCheck: new Date(time) },
})
await userHelper.confirmEmail(
userHelper.user._id,
userHelper.user.email
)
await userHelper.confirmEmail(userHelper.user.email)
})
it("shouldn't be redirected from project list to the primary email check page", async function () {
@ -153,10 +150,7 @@ describe('PrimaryEmailCheck', function () {
$set: { lastPrimaryEmailCheck: new Date(time) },
})
await userHelper.confirmEmail(
userHelper.user._id,
userHelper.user.email
)
await userHelper.confirmEmail(userHelper.user.email)
})
it("shouldn't be redirected from project list to the primary email check page", async function () {
@ -219,14 +213,8 @@ describe('PrimaryEmailCheck', function () {
})
beforeEach(async function () {
await userHelper.confirmEmail(
userHelper.user._id,
userHelper.user.email
)
await userHelper.addEmailAndConfirm(
userHelper.user._id,
'secondary@overleaf.com'
)
await userHelper.confirmEmail(userHelper.user.email)
await userHelper.addEmailAndConfirm('secondary@overleaf.com')
checkResponse = await userHelper.fetch(
'/user/emails/primary-email-check',

View file

@ -138,6 +138,47 @@ describe('ProjectStructureChanges', function () {
})
})
describe('when sending an upload request without a file', function () {
describe('project', function () {
it('should reject the request with status 400', async function () {
const { response, body } = await owner.doRequest('POST', {
uri: 'project/new/upload',
json: true,
formData: {
name: 'foo',
},
})
expect(response.statusCode).to.equal(400)
expect(body).to.deep.equal({
success: false,
error: 'invalid_upload_request',
})
})
})
describe('file', function () {
it('should reject the request with status 400', async function () {
const projectId = await owner.createProject('foo', {
template: 'blank',
})
const { response, body } = await owner.doRequest('POST', {
uri: `project/${projectId}/upload`,
json: true,
formData: {
name: 'foo.txt',
},
})
expect(response.statusCode).to.equal(400)
expect(body).to.deep.equal({
success: false,
error: 'invalid_upload_request',
})
})
})
})
describe('uploading an empty zipfile', function () {
let res

View file

@ -162,7 +162,7 @@ class UserHelper {
/**
*
* @param {'pendingExistingEmail'|'pendingUserRegistration'}sessionKey
* @param {'pendingExistingEmail'|'pendingUserRegistration'|'pendingSecondaryEmail'}sessionKey
* @return {Promise<*>}
*/
async getEmailConfirmationCode(sessionKey) {
@ -431,16 +431,16 @@ class UserHelper {
}
async addEmail(email) {
const response = await this.fetch('/user/emails', {
const response = await this.fetch('/user/emails/secondary', {
method: 'POST',
body: new URLSearchParams([['email', email]]),
})
await throwIfErrorResponse(response)
}
async addEmailAndConfirm(userId, email) {
async addEmailAndConfirm(email) {
await this.addEmail(email)
await this.confirmEmail(userId, email)
await this.confirmSecondaryEmail()
}
async changeConfirmationDate(userId, email, date) {
@ -499,9 +499,9 @@ class UserHelper {
await this.changeConfirmationDate(userId, email, date)
}
async confirmEmail(userId, email) {
async confirmEmail(email) {
// clear ratelimiting on resend confirmation endpoint
await rateLimiters.sendConfirmation.delete(userId)
await rateLimiters.sendConfirmation.delete(this.user._id)
const requestConfirmationCode = await this.fetch(
'/user/emails/send-confirmation-code',
{
@ -517,6 +517,25 @@ class UserHelper {
})
await throwIfErrorResponse(requestConfirmCode)
}
async confirmSecondaryEmail() {
const code = await this.getEmailConfirmationCode('pendingSecondaryEmail')
const requestConfirmCode = await this.fetch(
'/user/emails/confirm-secondary',
{
method: 'POST',
body: new URLSearchParams({ code }),
}
)
await throwIfErrorResponse(requestConfirmCode)
}
async unconfirmEmail(email) {
await UserUpdater.promises.updateUser(
{ _id: this.user._id, 'emails.email': email.toLowerCase() },
{ $unset: { 'emails.$.confirmedAt': 1, 'emails.$.reconfirmedAt': 1 } }
)
}
}
export default UserHelper

View file

@ -1500,4 +1500,331 @@ describe('AuthenticationController', function () {
})
})
})
describe('checkCredentials', function () {
beforeEach(function () {
this.userDetailsMap = new Map()
this.logger.err = sinon.stub()
this.Metrics.inc = sinon.stub()
})
describe('with valid credentials', function () {
describe('single password', function () {
beforeEach(function () {
this.userDetailsMap.set('testuser', 'correctpassword')
this.result = this.AuthenticationController.checkCredentials(
this.userDetailsMap,
'testuser',
'correctpassword'
)
})
it('should return true', function () {
this.result.should.equal(true)
})
it('should not log an error', function () {
this.logger.err.called.should.equal(false)
})
it('should record success metrics', function () {
this.Metrics.inc.should.have.been.calledWith(
'security.http-auth.check-credentials',
1,
{
path: 'known-user',
status: 'pass',
}
)
})
})
describe('array with primary password', function () {
beforeEach(function () {
this.userDetailsMap.set('testuser', ['primary', 'fallback'])
this.result = this.AuthenticationController.checkCredentials(
this.userDetailsMap,
'testuser',
'primary'
)
})
it('should return true', function () {
this.result.should.equal(true)
})
it('should not log an error', function () {
this.logger.err.called.should.equal(false)
})
it('should record success metrics', function () {
this.Metrics.inc.should.have.been.calledWith(
'security.http-auth.check-credentials',
1,
{
path: 'known-user',
status: 'pass',
}
)
})
})
describe('array with fallback password', function () {
beforeEach(function () {
this.userDetailsMap.set('testuser', ['primary', 'fallback'])
this.result = this.AuthenticationController.checkCredentials(
this.userDetailsMap,
'testuser',
'fallback'
)
})
it('should return true', function () {
this.result.should.equal(true)
})
it('should not log an error', function () {
this.logger.err.called.should.equal(false)
})
it('should record success metrics', function () {
this.Metrics.inc.should.have.been.calledWith(
'security.http-auth.check-credentials',
1,
{
path: 'known-user',
status: 'pass',
}
)
})
})
})
describe('with invalid credentials', function () {
describe('unknown user', function () {
beforeEach(function () {
this.userDetailsMap.set('testuser', 'correctpassword')
this.result = this.AuthenticationController.checkCredentials(
this.userDetailsMap,
'unknownuser',
'anypassword'
)
})
it('should return false', function () {
this.result.should.equal(false)
})
it('should log an error', function () {
this.logger.err.should.have.been.calledWith(
{ user: 'unknownuser' },
'invalid login details'
)
})
it('should record failure metrics', function () {
this.Metrics.inc.should.have.been.calledWith(
'security.http-auth.check-credentials',
1,
{
path: 'unknown-user',
status: 'fail',
}
)
})
})
describe('wrong password', function () {
beforeEach(function () {
this.userDetailsMap.set('testuser', 'correctpassword')
this.result = this.AuthenticationController.checkCredentials(
this.userDetailsMap,
'testuser',
'wrongpassword'
)
})
it('should return false', function () {
this.result.should.equal(false)
})
it('should log an error', function () {
this.logger.err.should.have.been.calledWith(
{ user: 'testuser' },
'invalid login details'
)
})
it('should record failure metrics', function () {
this.Metrics.inc.should.have.been.calledWith(
'security.http-auth.check-credentials',
1,
{
path: 'known-user',
status: 'fail',
}
)
})
})
describe('wrong password with array', function () {
beforeEach(function () {
this.userDetailsMap.set('testuser', ['primary', 'fallback'])
this.result = this.AuthenticationController.checkCredentials(
this.userDetailsMap,
'testuser',
'wrongpassword'
)
})
it('should return false', function () {
this.result.should.equal(false)
})
it('should log an error', function () {
this.logger.err.should.have.been.calledWith(
{ user: 'testuser' },
'invalid login details'
)
})
it('should record failure metrics', function () {
this.Metrics.inc.should.have.been.calledWith(
'security.http-auth.check-credentials',
1,
{
path: 'known-user',
status: 'fail',
}
)
})
})
describe('null user entry', function () {
beforeEach(function () {
this.userDetailsMap.set('testuser', null)
this.result = this.AuthenticationController.checkCredentials(
this.userDetailsMap,
'testuser',
'anypassword'
)
})
it('should return false', function () {
this.result.should.equal(false)
})
it('should log an error', function () {
this.logger.err.should.have.been.calledWith(
{ user: 'testuser' },
'invalid login details'
)
})
it('should record failure metrics for unknown user', function () {
this.Metrics.inc.should.have.been.calledWith(
'security.http-auth.check-credentials',
1,
{
path: 'unknown-user',
status: 'fail',
}
)
})
})
describe('empty primary password in array', function () {
beforeEach(function () {
this.userDetailsMap.set('testuser', ['', 'fallback'])
this.result = this.AuthenticationController.checkCredentials(
this.userDetailsMap,
'testuser',
'fallback'
)
})
it('should return true with fallback password', function () {
this.result.should.equal(true)
})
it('should not log an error', function () {
this.logger.err.called.should.equal(false)
})
})
describe('empty fallback password in array', function () {
beforeEach(function () {
this.userDetailsMap.set('testuser', ['primary', ''])
this.result = this.AuthenticationController.checkCredentials(
this.userDetailsMap,
'testuser',
'primary'
)
})
it('should return true with primary password', function () {
this.result.should.equal(true)
})
it('should not log an error', function () {
this.logger.err.called.should.equal(false)
})
})
describe('both passwords empty in array', function () {
beforeEach(function () {
this.userDetailsMap.set('testuser', ['', ''])
this.result = this.AuthenticationController.checkCredentials(
this.userDetailsMap,
'testuser',
'anypassword'
)
})
it('should return false', function () {
this.result.should.equal(false)
})
it('should log an error', function () {
this.logger.err.should.have.been.calledWith(
{ user: 'testuser' },
'invalid login details'
)
})
})
describe('empty single password', function () {
beforeEach(function () {
this.userDetailsMap.set('testuser', '')
this.result = this.AuthenticationController.checkCredentials(
this.userDetailsMap,
'testuser',
'anypassword'
)
})
it('should return false', function () {
this.result.should.equal(false)
})
it('should log an error', function () {
this.logger.err.should.have.been.calledWith(
{ user: 'testuser' },
'invalid login details'
)
})
it('should record failure metrics for unknown user', function () {
this.Metrics.inc.should.have.been.calledWith(
'security.http-auth.check-credentials',
1,
{
path: 'unknown-user',
status: 'fail',
}
)
})
})
})
})
})

View file

@ -29,6 +29,7 @@ describe('DocumentUpdaterHandler', function () {
url: 'http://project_history.example.com',
},
},
filestoreMigrationLevel: 0,
moduleImportSequence: [],
}
this.source = 'dropbox'
@ -1491,7 +1492,7 @@ describe('DocumentUpdaterHandler', function () {
describe('with filestore disabled', function () {
beforeEach(function () {
this.settings.disableFilestore = true
this.settings.filestoreMigrationLevel = 2
})
it('should add files without URL and with createdBlob', async function () {
this.fileId = new ObjectId()
@ -1700,7 +1701,7 @@ describe('DocumentUpdaterHandler', function () {
})
describe('with filestore disabled', function () {
beforeEach(function () {
this.settings.disableFilestore = true
this.settings.filestoreMigrationLevel = 2
})
it('should add files without URL', async function () {
const fileId1 = new ObjectId()

View file

@ -87,6 +87,14 @@ describe('DocumentController', function () {
},
}
ctx.Modules = {
promises: {
hooks: {
fire: sinon.stub().resolves(),
},
},
}
vi.doMock('../../../../app/src/Features/Project/ProjectGetter', () => ({
default: ctx.ProjectGetter,
}))
@ -113,6 +121,10 @@ describe('DocumentController', function () {
default: ctx.ChatApiHandler,
}))
vi.doMock('../../../../app/src/infrastructure/Modules.js', () => ({
default: ctx.Modules,
}))
ctx.DocumentController = (await import(MODULE_PATH)).default
})
@ -208,6 +220,15 @@ describe('DocumentController', function () {
it('should return a successful response', function (ctx) {
ctx.res.success.should.equal(true)
})
it('should call the docModified hook', function (ctx) {
sinon.assert.calledWith(
ctx.Modules.promises.hooks.fire,
'docModified',
ctx.project._id,
ctx.doc._id
)
})
})
describe("when the document doesn't exist", function () {

View file

@ -50,7 +50,7 @@ describe('ReferencesHandler', function () {
filestore: { url: 'http://some.url/filestore' },
project_history: { url: 'http://project-history.local' },
},
enableProjectHistoryBlobs: true,
filestoreMigrationLevel: 2,
}),
}))

View file

@ -39,6 +39,7 @@ describe('SplitTestHandler', function () {
}
this.SplitTestCache.get.resolves(this.cachedSplitTests)
this.Settings = {
filestoreMigrationLevel: 0,
moduleImportSequence: [],
overleaf: {},
devToolbar: {

View file

@ -57,7 +57,7 @@ describe('TpdsUpdateSender', function () {
url: projectHistoryUrl,
},
},
enableProjectHistoryBlobs: true,
filestoreMigrationLevel: true,
}
const getUsers = sinon.stub()
getUsers

View file

@ -7,6 +7,7 @@ describe('Features', function () {
this.Features = SandboxedModule.require(modulePath, {
requires: {
'@overleaf/settings': (this.settings = {
filestoreMigrationLevel: 0,
moduleImportSequence: [],
enabledLinkedFileTypes: [],
}),

View file

@ -1,3 +1,10 @@
export type AdminCapability = 'modify-user-email' | 'view-project'
export type AdminRole = 'engineering'
export type AdminRole =
| 'engagement'
| 'engineering'
| 'finance'
| 'product'
| 'sales'
| 'support'
| 'support_tier_1'

View file

@ -53,7 +53,10 @@ export type WebModule = {
apply: (webRouter: any, privateApiRouter: any, publicApiRouter: any) => void
}
hooks?: {
[name: string]: (args: any[]) => void
promises?: {
[name: string]: (...args: any[]) => Promise<any>
}
[name: string]: ((...args: any[]) => void) | any
}
middleware?: {
[name: string]: RequestHandler

View file

@ -27,5 +27,6 @@ declare global {
gtag?: (...args: any) => void
propensity?: (propensityId?: string) => void
olLoadGA?: () => void
}
}