Mirror of https://github.com/yu-i-i/overleaf-cep.git
Synced 2025-07-23 23:00:08 +02:00

Compare commits: 5 commits (570ca81ec7 ... 1d3e71645b)
Commits:
1d3e71645b
ca89eceacb
27b1d67e3a
f52d7b0b27
b7e579b396
80 changed files with 551 additions and 1150 deletions
package-lock.json — 31 changed lines (generated)
@@ -35581,7 +35581,6 @@
       "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
       "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==",
       "deprecated": "request has been deprecated, see https://github.com/request/request/issues/3142",
       "license": "Apache-2.0",
       "dependencies": {
         "aws-sign2": "~0.7.0",
         "aws4": "^1.8.0",
@@ -35639,15 +35638,15 @@
       }
     },
     "node_modules/request/node_modules/tough-cookie": {
-      "version": "5.1.2",
-      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-5.1.2.tgz",
-      "integrity": "sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A==",
-      "license": "BSD-3-Clause",
+      "version": "2.5.0",
+      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
+      "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
       "dependencies": {
-        "tldts": "^6.1.32"
+        "psl": "^1.1.28",
+        "punycode": "^2.1.1"
       },
       "engines": {
-        "node": ">=16"
+        "node": ">=0.8"
       }
     },
     "node_modules/requestretry": {
@@ -39613,24 +39612,6 @@
         "tlds": "bin.js"
       }
     },
-    "node_modules/tldts": {
-      "version": "6.1.86",
-      "resolved": "https://registry.npmjs.org/tldts/-/tldts-6.1.86.tgz",
-      "integrity": "sha512-WMi/OQ2axVTf/ykqCQgXiIct+mSQDFdH2fkwhPwgEwvJ1kSzZRiinb0zF2Xb8u4+OqPChmyI6MEu4EezNJz+FQ==",
-      "license": "MIT",
-      "dependencies": {
-        "tldts-core": "^6.1.86"
-      },
-      "bin": {
-        "tldts": "bin/cli.js"
-      }
-    },
-    "node_modules/tldts-core": {
-      "version": "6.1.86",
-      "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-6.1.86.tgz",
-      "integrity": "sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA==",
-      "license": "MIT"
-    },
     "node_modules/tmp": {
       "version": "0.2.3",
       "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz",
@@ -33,9 +33,6 @@
     "path-to-regexp": "3.3.0",
     "body-parser": "1.20.3",
     "multer": "2.0.1"
-    },
-    "request@2.88.2": {
-      "tough-cookie": "5.1.2"
-    }
+    }
   },
   "scripts": {
@@ -21,11 +21,9 @@ test-e2e-native:

 test-e2e:
-	docker compose build host-admin
 	docker compose up -d host-admin
 	docker compose up --no-log-prefix --exit-code-from=e2e e2e

 test-e2e-open:
 	docker compose up -d host-admin
 	docker compose up --no-log-prefix --exit-code-from=e2e-open e2e-open

 clean:
@@ -35,7 +35,7 @@ services:
       MAILTRAP_PASSWORD: 'password-for-mailtrap'

   mongo:
-    image: mongo:8.0.11
+    image: mongo:6.0
     command: '--replSet overleaf'
     volumes:
       - ../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
@@ -2,7 +2,6 @@ import {
   createNewFile,
   createProject,
   openProjectById,
-  testNewFileUpload,
 } from './helpers/project'
 import { isExcludedBySharding, startWith } from './helpers/config'
 import { ensureUserExists, login } from './helpers/login'
@@ -120,7 +119,24 @@ describe('editor', () => {
       cy.get('button').contains('New file').click({ force: true })
     })

-    testNewFileUpload()
+    it('can upload file', () => {
+      const name = `${uuid()}.txt`
+      const content = `Test File Content ${name}`
+      cy.get('button').contains('Upload').click({ force: true })
+      cy.get('input[type=file]')
+        .first()
+        .selectFile(
+          {
+            contents: Cypress.Buffer.from(content),
+            fileName: name,
+            lastModified: Date.now(),
+          },
+          { force: true }
+        )
+      // force: The file-tree pane is too narrow to display the full name.
+      cy.findByTestId('file-tree').findByText(name).click({ force: true })
+      cy.findByText(content)
+    })

     it('should not display import from URL', () => {
       cy.findByText('From external URL').should('not.exist')
@@ -1,104 +0,0 @@
-import { ensureUserExists, login } from './helpers/login'
-import {
-  createProject,
-  openProjectById,
-  prepareFileUploadTest,
-} from './helpers/project'
-import { isExcludedBySharding, startWith } from './helpers/config'
-import { prepareWaitForNextCompileSlot } from './helpers/compile'
-import { beforeWithReRunOnTestRetry } from './helpers/beforeWithReRunOnTestRetry'
-import { v4 as uuid } from 'uuid'
-import { purgeFilestoreData, runScript } from './helpers/hostAdminClient'
-
-describe('filestore migration', function () {
-  if (isExcludedBySharding('CE_CUSTOM_3')) return
-  startWith({ withDataDir: true, resetData: true, vars: {} })
-  ensureUserExists({ email: 'user@example.com' })
-
-  let projectName: string
-  let projectId: string
-  let waitForCompileRateLimitCoolOff: (fn: () => void) => void
-  const previousBinaryFiles: (() => void)[] = []
-  beforeWithReRunOnTestRetry(function () {
-    projectName = `project-${uuid()}`
-    login('user@example.com')
-    createProject(projectName, { type: 'Example project' }).then(
-      id => (projectId = id)
-    )
-    let queueReset
-    ;({ waitForCompileRateLimitCoolOff, queueReset } =
-      prepareWaitForNextCompileSlot())
-    queueReset()
-    previousBinaryFiles.push(prepareFileUploadTest(true))
-  })
-
-  beforeEach(() => {
-    login('user@example.com')
-    waitForCompileRateLimitCoolOff(() => {
-      openProjectById(projectId)
-    })
-  })
-
-  function checkFilesAreAccessible() {
-    it('can upload new binary file and read previous uploads', function () {
-      previousBinaryFiles.push(prepareFileUploadTest(true))
-      for (const check of previousBinaryFiles) {
-        check()
-      }
-    })
-
-    it('renders frog jpg', () => {
-      cy.findByTestId('file-tree').findByText('frog.jpg').click()
-      cy.get('[alt="frog.jpg"]')
-        .should('be.visible')
-        .and('have.prop', 'naturalWidth')
-        .should('be.greaterThan', 0)
-    })
-  }
-
-  describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL not set', function () {
-    startWith({ withDataDir: true, vars: {} })
-    checkFilesAreAccessible()
-  })
-
-  describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL=0', function () {
-    startWith({
-      withDataDir: true,
-      vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '0' },
-    })
-    checkFilesAreAccessible()
-
-    describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL=1', function () {
-      startWith({
-        withDataDir: true,
-        vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '1' },
-      })
-      checkFilesAreAccessible()
-
-      describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL=2', function () {
-        startWith({
-          withDataDir: true,
-          vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '1' },
-        })
-        before(async function () {
-          await runScript({
-            cwd: 'services/history-v1',
-            script: 'storage/scripts/back_fill_file_hash.mjs',
-          })
-        })
-        startWith({
-          withDataDir: true,
-          vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '2' },
-        })
-        checkFilesAreAccessible()
-
-        describe('purge filestore data', function () {
-          before(async function () {
-            await purgeFilestoreData()
-          })
-          checkFilesAreAccessible()
-        })
-      })
-    })
-  })
-})
@@ -9,7 +9,6 @@ export function isExcludedBySharding(
     | 'CE_DEFAULT'
     | 'CE_CUSTOM_1'
     | 'CE_CUSTOM_2'
-    | 'CE_CUSTOM_3'
     | 'PRO_DEFAULT_1'
     | 'PRO_DEFAULT_2'
     | 'PRO_CUSTOM_1'
@@ -85,12 +85,6 @@ export async function getRedisKeys() {
   return stdout.split('\n')
 }

-export async function purgeFilestoreData() {
-  await fetchJSON(`${hostAdminURL}/data/user_files`, {
-    method: 'DELETE',
-  })
-}
-
 async function sleep(ms: number) {
   return new Promise(resolve => {
     setTimeout(resolve, ms)
@@ -216,43 +216,3 @@ export function createNewFile() {

   return fileName
 }
-
-export function prepareFileUploadTest(binary = false) {
-  const name = `${uuid()}.txt`
-  const content = `Test File Content ${name}${binary ? ' \x00' : ''}`
-  cy.get('button').contains('Upload').click({ force: true })
-  cy.get('input[type=file]')
-    .first()
-    .selectFile(
-      {
-        contents: Cypress.Buffer.from(content),
-        fileName: name,
-        lastModified: Date.now(),
-      },
-      { force: true }
-    )
-
-  // wait for the upload to finish
-  cy.findByRole('treeitem', { name })
-
-  return function check() {
-    cy.findByRole('treeitem', { name }).click()
-    if (binary) {
-      cy.findByText(content).should('not.have.class', 'cm-line')
-    } else {
-      cy.findByText(content).should('have.class', 'cm-line')
-    }
-  }
-}
-
-export function testNewFileUpload() {
-  it('can upload text file', () => {
-    const check = prepareFileUploadTest(false)
-    check()
-  })
-
-  it('can upload binary file', () => {
-    const check = prepareFileUploadTest(true)
-    check()
-  })
-}
@@ -29,17 +29,6 @@ const IMAGES = {
   PRO: process.env.IMAGE_TAG_PRO.replace(/:.+/, ''),
 }

-function defaultDockerComposeOverride() {
-  return {
-    services: {
-      sharelatex: {
-        environment: {},
-      },
-      'git-bridge': {},
-    },
-  }
-}
-
 let previousConfig = ''

 function readDockerComposeOverride() {
@@ -49,7 +38,14 @@ function readDockerComposeOverride() {
     if (error.code !== 'ENOENT') {
       throw error
     }
-    return defaultDockerComposeOverride
+    return {
+      services: {
+        sharelatex: {
+          environment: {},
+        },
+        'git-bridge': {},
+      },
+    }
   }
 }
@@ -81,21 +77,12 @@ app.use(bodyParser.json())
 app.use((req, res, next) => {
   // Basic access logs
   console.log(req.method, req.url, req.body)
-  const json = res.json
-  res.json = body => {
-    console.log(req.method, req.url, req.body, '->', body)
-    json.call(res, body)
-  }
-  next()
-})
-app.use((req, res, next) => {
   // Add CORS headers
   const accessControlAllowOrigin =
     process.env.ACCESS_CONTROL_ALLOW_ORIGIN || 'http://sharelatex'
   res.setHeader('Access-Control-Allow-Origin', accessControlAllowOrigin)
   res.setHeader('Access-Control-Allow-Headers', 'Content-Type')
   res.setHeader('Access-Control-Max-Age', '3600')
   res.setHeader('Access-Control-Allow-Methods', 'DELETE, GET, HEAD, POST, PUT')
   next()
 })
@@ -146,7 +133,6 @@ const allowedVars = Joi.object(
       'V1_HISTORY_URL',
       'SANDBOXED_COMPILES',
       'ALL_TEX_LIVE_DOCKER_IMAGE_NAMES',
-      'OVERLEAF_FILESTORE_MIGRATION_LEVEL',
       'OVERLEAF_TEMPLATES_USER_ID',
       'OVERLEAF_NEW_PROJECT_TEMPLATE_LINKS',
       'OVERLEAF_ALLOW_PUBLIC_ACCESS',
@@ -333,19 +319,8 @@ app.get('/redis/keys', (req, res) => {
   )
 })

-app.delete('/data/user_files', (req, res) => {
-  runDockerCompose(
-    'exec',
-    ['sharelatex', 'rm', '-rf', '/var/lib/overleaf/data/user_files'],
-    (error, stdout, stderr) => {
-      res.json({ error, stdout, stderr })
-    }
-  )
-})
-
 app.use(handleValidationErrors())

 purgeDataDir()
-writeDockerComposeOverride(defaultDockerComposeOverride())

 app.listen(80)
@@ -42,7 +42,7 @@ services:
     command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
     user: root
   mongo:
-    image: mongo:8.0.11
+    image: mongo:7.0.20
     command: --replSet overleaf
     volumes:
       - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

@@ -44,7 +44,7 @@ services:
     command: npm run --silent test:acceptance

   mongo:
-    image: mongo:8.0.11
+    image: mongo:7.0.20
     command: --replSet overleaf
     volumes:
       - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

@@ -42,7 +42,7 @@ services:
     command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
     user: root
   mongo:
-    image: mongo:8.0.11
+    image: mongo:7.0.20
     command: --replSet overleaf
     volumes:
       - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

@@ -44,7 +44,7 @@ services:
     command: npm run --silent test:acceptance

   mongo:
-    image: mongo:8.0.11
+    image: mongo:7.0.20
     command: --replSet overleaf
     volumes:
       - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

@@ -47,7 +47,7 @@ services:
     command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
     user: root
   mongo:
-    image: mongo:8.0.11
+    image: mongo:7.0.20
     command: --replSet overleaf
     volumes:
       - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

@@ -49,7 +49,7 @@ services:
     command: npm run --silent test:acceptance

   mongo:
-    image: mongo:8.0.11
+    image: mongo:7.0.20
     command: --replSet overleaf
     volumes:
       - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

@@ -55,7 +55,7 @@ services:
       retries: 20

   mongo:
-    image: mongo:8.0.11
+    image: mongo:7.0.20
     command: --replSet overleaf
     volumes:
       - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

@@ -57,7 +57,7 @@ services:
       retries: 20

   mongo:
-    image: mongo:8.0.11
+    image: mongo:7.0.20
     command: --replSet overleaf
     volumes:
       - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

@@ -75,7 +75,7 @@ services:
       retries: 20

   mongo:
-    image: mongo:8.0.11
+    image: mongo:7.0.20
     command: --replSet overleaf
     volumes:
       - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

@@ -83,7 +83,7 @@ services:
       retries: 20

   mongo:
-    image: mongo:8.0.11
+    image: mongo:7.0.20
     command: --replSet overleaf
     volumes:
       - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
@@ -150,6 +150,10 @@ const CONCURRENT_BATCHES = parseInt(process.env.CONCURRENT_BATCHES || '2', 10)
 const RETRIES = parseInt(process.env.RETRIES || '10', 10)
 const RETRY_DELAY_MS = parseInt(process.env.RETRY_DELAY_MS || '100', 10)

+const USER_FILES_BUCKET_NAME = process.env.USER_FILES_BUCKET_NAME || ''
+if (!USER_FILES_BUCKET_NAME) {
+  throw new Error('env var USER_FILES_BUCKET_NAME is missing')
+}
 const RETRY_FILESTORE_404 = process.env.RETRY_FILESTORE_404 === 'true'
 const BUFFER_DIR = fs.mkdtempSync(
   process.env.BUFFER_DIR_PREFIX || '/tmp/back_fill_file_hash-'
@@ -9,12 +9,15 @@ import { Blob } from 'overleaf-editor-core'
 import {
   BlobStore,
   getStringLengthOfFile,
+  GLOBAL_BLOBS,
   makeBlobForFile,
 } from '../lib/blob_store/index.js'
 import { db } from '../lib/mongodb.js'
 import commandLineArgs from 'command-line-args'
 import readline from 'node:readline'
+import { _blobIsBackedUp, backupBlob } from '../lib/backupBlob.mjs'
 import { NotFoundError } from '@overleaf/object-persistor/src/Errors.js'
+import filestorePersistor from '../lib/persistor.js'
 import { setTimeout } from 'node:timers/promises'

 // Silence warning.
@@ -49,11 +52,12 @@ ObjectId.cacheHexString = true
 */

 /**
- * @return {{FIX_NOT_FOUND: boolean, FIX_HASH_MISMATCH: boolean, FIX_MISSING_HASH: boolean, LOGS: string}}
+ * @return {{FIX_NOT_FOUND: boolean, FIX_HASH_MISMATCH: boolean, FIX_DELETE_PERMISSION: boolean, FIX_MISSING_HASH: boolean, LOGS: string}}
 */
 function parseArgs() {
   const args = commandLineArgs([
     { name: 'fixNotFound', type: String, defaultValue: 'true' },
+    { name: 'fixDeletePermission', type: String, defaultValue: 'true' },
     { name: 'fixHashMismatch', type: String, defaultValue: 'true' },
     { name: 'fixMissingHash', type: String, defaultValue: 'true' },
     { name: 'logs', type: String, defaultValue: '' },
@@ -70,13 +74,20 @@ function parseArgs() {
   }
   return {
     FIX_HASH_MISMATCH: boolVal('fixNotFound'),
+    FIX_DELETE_PERMISSION: boolVal('fixDeletePermission'),
     FIX_NOT_FOUND: boolVal('fixHashMismatch'),
     FIX_MISSING_HASH: boolVal('fixMissingHash'),
     LOGS: args.logs,
   }
 }

-const { FIX_HASH_MISMATCH, FIX_NOT_FOUND, FIX_MISSING_HASH, LOGS } = parseArgs()
+const {
+  FIX_HASH_MISMATCH,
+  FIX_DELETE_PERMISSION,
+  FIX_NOT_FOUND,
+  FIX_MISSING_HASH,
+  LOGS,
+} = parseArgs()
 if (!LOGS) {
   throw new Error('--logs parameter missing')
 }
@@ -94,37 +105,6 @@ const STREAM_HIGH_WATER_MARK = parseInt(
 )
 const SLEEP_BEFORE_EXIT = parseInt(process.env.SLEEP_BEFORE_EXIT || '1000', 10)

-// Filestore endpoint location
-const FILESTORE_HOST = process.env.FILESTORE_HOST || '127.0.0.1'
-const FILESTORE_PORT = process.env.FILESTORE_PORT || '3009'
-
-async function fetchFromFilestore(projectId, fileId) {
-  const url = `http://${FILESTORE_HOST}:${FILESTORE_PORT}/project/${projectId}/file/${fileId}`
-  const response = await fetch(url)
-  if (!response.ok) {
-    if (response.status === 404) {
-      throw new NotFoundError('file not found in filestore', {
-        status: response.status,
-      })
-    }
-    const body = await response.text()
-    throw new OError('fetchFromFilestore failed', {
-      projectId,
-      fileId,
-      status: response.status,
-      body,
-    })
-  }
-  if (!response.body) {
-    throw new OError('fetchFromFilestore response has no body', {
-      projectId,
-      fileId,
-      status: response.status,
-    })
-  }
-  return response.body
-}
-
 /** @type {ProjectsCollection} */
 const projectsCollection = db.collection('projects')
 /** @type {DeletedProjectsCollection} */
@@ -322,16 +302,19 @@ async function setHashInMongo(projectId, fileId, hash) {
 * @return {Promise<void>}
 */
 async function importRestoredFilestoreFile(projectId, fileId, historyId) {
+  const filestoreKey = `${projectId}/${fileId}`
   const path = `${BUFFER_DIR}/${projectId}_${fileId}`
   try {
     let s
     try {
-      s = await fetchFromFilestore(projectId, fileId)
+      s = await filestorePersistor.getObjectStream(
+        USER_FILES_BUCKET_NAME,
+        filestoreKey
+      )
     } catch (err) {
       if (err instanceof NotFoundError) {
         throw new OError('missing blob, need to restore filestore file', {
-          projectId,
-          fileId,
+          filestoreKey,
         })
       }
       throw err
@@ -342,6 +325,7 @@ async function importRestoredFilestoreFile(projectId, fileId, historyId) {
     )
     const blobStore = new BlobStore(historyId)
     const blob = await blobStore.putFile(path)
+    await backupBlob(historyId, blob, path)
     await setHashInMongo(projectId, fileId, blob.getHash())
   } finally {
     await fs.promises.rm(path, { force: true })
@@ -355,9 +339,13 @@ async function importRestoredFilestoreFile(projectId, fileId, historyId) {
 * @return {Promise<Blob>}
 */
 async function bufferFilestoreFileToDisk(projectId, fileId, path) {
+  const filestoreKey = `${projectId}/${fileId}`
   try {
     await Stream.promises.pipeline(
-      await fetchFromFilestore(projectId, fileId),
+      await filestorePersistor.getObjectStream(
+        USER_FILES_BUCKET_NAME,
+        filestoreKey
+      ),
       fs.createWriteStream(path, { highWaterMark: STREAM_HIGH_WATER_MARK })
     )
     const blob = await makeBlobForFile(path)
@@ -368,8 +356,7 @@ async function bufferFilestoreFileToDisk(projectId, fileId, path) {
   } catch (err) {
     if (err instanceof NotFoundError) {
       throw new OError('missing blob, need to restore filestore file', {
-        projectId,
-        fileId,
+        filestoreKey,
       })
     }
     throw err
@@ -402,7 +389,7 @@ async function uploadFilestoreFile(projectId, fileId) {
     const blob = await bufferFilestoreFileToDisk(projectId, fileId, path)
     const hash = blob.getHash()
     try {
-      await ensureBlobExistsForFile(projectId, fileId, hash)
+      await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash)
     } catch (err) {
       if (!(err instanceof Blob.NotFoundError)) throw err
@@ -410,7 +397,7 @@ async function uploadFilestoreFile(projectId, fileId) {
       const historyId = project.overleaf.history.id.toString()
       const blobStore = new BlobStore(historyId)
       await blobStore.putBlob(path, blob)
-      await ensureBlobExistsForFile(projectId, fileId, hash)
+      await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash)
     }
   } finally {
     await fs.promises.rm(path, { force: true })
@@ -439,7 +426,11 @@ async function fixHashMismatch(line) {
     await importRestoredFilestoreFile(projectId, fileId, historyId)
     return true
   }
-  return await ensureBlobExistsForFile(projectId, fileId, computedHash)
+  return await ensureBlobExistsForFileAndUploadToAWS(
+    projectId,
+    fileId,
+    computedHash
+  )
 }

 /**
@@ -453,19 +444,30 @@ async function hashAlreadyUpdatedInFileTree(projectId, fileId, hash) {
   return fileRef.hash === hash
 }

+/**
+ * @param {string} projectId
+ * @param {string} hash
+ * @return {Promise<boolean>}
+ */
+async function needsBackingUpToAWS(projectId, hash) {
+  if (GLOBAL_BLOBS.has(hash)) return false
+  return !(await _blobIsBackedUp(projectId, hash))
+}
+
 /**
  * @param {string} projectId
  * @param {string} fileId
  * @param {string} hash
  * @return {Promise<boolean>}
  */
-async function ensureBlobExistsForFile(projectId, fileId, hash) {
+async function ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) {
   const { project } = await getProject(projectId)
   const historyId = project.overleaf.history.id.toString()
   const blobStore = new BlobStore(historyId)
   if (
     (await hashAlreadyUpdatedInFileTree(projectId, fileId, hash)) &&
-    (await blobStore.getBlob(hash))
+    (await blobStore.getBlob(hash)) &&
+    !(await needsBackingUpToAWS(projectId, hash))
   ) {
     return false // already processed
   }
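Editor's note: the widened guard above means a file now counts as already processed only when all three checks pass — the file tree carries the hash, the blob exists in history, and the blob no longer needs backing up to AWS. A condensed restatement, purely illustrative and not part of the diff:

// Condensed restatement of the guard added in the hunk above (illustrative).
async function alreadyProcessed(projectId, fileId, hash, blobStore) {
  return (
    (await hashAlreadyUpdatedInFileTree(projectId, fileId, hash)) &&
    Boolean(await blobStore.getBlob(hash)) &&
    !(await needsBackingUpToAWS(projectId, hash))
  )
}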
@@ -486,7 +488,7 @@
     )
     if (writtenBlob.getHash() !== hash) {
       // Double check download, better safe than sorry.
-      throw new OError('blob corrupted', { writtenBlob, hash })
+      throw new OError('blob corrupted', { writtenBlob })
     }

     let blob = await blobStore.getBlob(hash)
@@ -495,6 +497,7 @@
       // HACK: Skip upload to GCS and finalize putBlob operation directly.
       await blobStore.backend.insertBlob(historyId, writtenBlob)
     }
+    await backupBlob(historyId, writtenBlob, path)
   } finally {
     await fs.promises.rm(path, { force: true })
   }
@@ -502,6 +505,16 @@
   return true
 }

+/**
+ * @param {string} line
+ * @return {Promise<boolean>}
+ */
+async function fixDeletePermission(line) {
+  let { projectId, fileId, hash } = JSON.parse(line)
+  if (!hash) hash = await computeFilestoreFileHash(projectId, fileId)
+  return await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash)
+}
+
 /**
  * @param {string} line
  * @return {Promise<boolean>}
@@ -513,7 +526,7 @@ async function fixMissingHash(line) {
   } = await findFile(projectId, fileId)
   if (hash) {
     // processed, double check
-    return await ensureBlobExistsForFile(projectId, fileId, hash)
+    return await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash)
   }
   await uploadFilestoreFile(projectId, fileId)
   return true
@@ -530,6 +543,11 @@ const CASES = {
     flag: FIX_HASH_MISMATCH,
     action: fixHashMismatch,
   },
+  'delete permission': {
+    match: 'storage.objects.delete',
+    flag: FIX_DELETE_PERMISSION,
+    action: fixDeletePermission,
+  },
   'missing file hash': {
     match: '"bad file hash"',
     flag: FIX_MISSING_HASH,
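For orientation, a hypothetical example of an input line the new fixDeletePermission handler would consume — the field names follow the JSON.parse destructuring in the hunk above, but the exact log shape is an assumption:

// Hypothetical log line matched by the new 'storage.objects.delete' case:
const line = JSON.stringify({
  projectId: '66f0a3e9c2a1b45d9e8f0123', // illustrative 24-hex ObjectId
  fileId: '66f0a3e9c2a1b45d9e8f0456',
  hash: 'ba7816bf8f01cfea414140de5dae2223b00361a3', // optional; recomputed when absent
})
// fixDeletePermission(line) then re-ensures the blob exists and is uploaded to AWS.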
@@ -20,7 +20,7 @@ import {
   makeProjectKey,
 } from '../../../../storage/lib/blob_store/index.js'

-import { mockFilestore } from './support/MockFilestore.mjs'
+import express from 'express'

 chai.use(chaiExclude)
 const TIMEOUT = 20 * 1_000
@@ -28,6 +28,59 @@ const TIMEOUT = 20 * 1_000
 const projectsCollection = db.collection('projects')
 const deletedProjectsCollection = db.collection('deletedProjects')

+class MockFilestore {
+  constructor() {
+    this.host = process.env.FILESTORE_HOST || '127.0.0.1'
+    this.port = process.env.FILESTORE_PORT || 3009
+    // create a server listening on this.host and this.port
+    this.files = {}
+
+    this.app = express()
+
+    this.app.get('/project/:projectId/file/:fileId', (req, res) => {
+      const { projectId, fileId } = req.params
+      const content = this.files[projectId]?.[fileId]
+      if (!content) return res.status(404).end()
+      res.status(200).end(content)
+    })
+  }
+
+  start() {
+    // reset stored files
+    this.files = {}
+    // start the server
+    if (this.serverPromise) {
+      return this.serverPromise
+    } else {
+      this.serverPromise = new Promise((resolve, reject) => {
+        this.server = this.app.listen(this.port, this.host, err => {
+          if (err) return reject(err)
+          resolve()
+        })
+      })
+      return this.serverPromise
+    }
+  }
+
+  addFile(projectId, fileId, fileContent) {
+    if (!this.files[projectId]) {
+      this.files[projectId] = {}
+    }
+    this.files[projectId][fileId] = fileContent
+  }
+
+  deleteObject(projectId, fileId) {
+    if (this.files[projectId]) {
+      delete this.files[projectId][fileId]
+      if (Object.keys(this.files[projectId]).length === 0) {
+        delete this.files[projectId]
+      }
+    }
+  }
+}
+
+const mockFilestore = new MockFilestore()
+
 /**
  * @param {ObjectId} objectId
  * @return {string}
@@ -1,24 +1,48 @@
 import fs from 'node:fs'
 import Crypto from 'node:crypto'
 import Stream from 'node:stream'
 import { promisify } from 'node:util'
 import { Binary, ObjectId } from 'mongodb'
 import { Blob } from 'overleaf-editor-core'
-import { db } from '../../../../storage/lib/mongodb.js'
+import { backedUpBlobs, blobs, db } from '../../../../storage/lib/mongodb.js'
 import cleanup from './support/cleanup.js'
 import testProjects from '../api/support/test_projects.js'
 import { execFile } from 'node:child_process'
 import chai, { expect } from 'chai'
 import chaiExclude from 'chai-exclude'
-import { BlobStore } from '../../../../storage/lib/blob_store/index.js'
-import { mockFilestore } from './support/MockFilestore.mjs'
+import config from 'config'
+import { WritableBuffer } from '@overleaf/stream-utils'
+import {
+  backupPersistor,
+  projectBlobsBucket,
+} from '../../../../storage/lib/backupPersistor.mjs'
+import projectKey from '../../../../storage/lib/project_key.js'
+import {
+  BlobStore,
+  makeProjectKey,
+} from '../../../../storage/lib/blob_store/index.js'
+import ObjectPersistor from '@overleaf/object-persistor'

 chai.use(chaiExclude)

 const TIMEOUT = 20 * 1_000

+const { deksBucket } = config.get('backupStore')
+const { tieringStorageClass } = config.get('backupPersistor')
+
 const projectsCollection = db.collection('projects')
 const deletedProjectsCollection = db.collection('deletedProjects')

+const FILESTORE_PERSISTOR = ObjectPersistor({
+  backend: 'gcs',
+  gcs: {
+    endpoint: {
+      apiEndpoint: process.env.GCS_API_ENDPOINT,
+      projectId: process.env.GCS_PROJECT_ID,
+    },
+  },
+})
+
 /**
  * @param {ObjectId} objectId
  * @return {string}
@@ -46,6 +70,17 @@ function binaryForGitBlobHash(gitBlobHash) {
   return new Binary(Buffer.from(gitBlobHash, 'hex'))
 }

+async function listS3Bucket(bucket, wantStorageClass) {
+  const client = backupPersistor._getClientForBucket(bucket)
+  const response = await client.listObjectsV2({ Bucket: bucket }).promise()
+
+  for (const object of response.Contents || []) {
+    expect(object).to.have.property('StorageClass', wantStorageClass)
+  }
+
+  return (response.Contents || []).map(item => item.Key || '')
+}
+
 function objectIdFromTime(timestamp) {
   return ObjectId.createFromTime(new Date(timestamp).getTime() / 1000)
 }
@@ -62,6 +97,7 @@ describe('back_fill_file_hash_fix_up script', function () {
   const historyIdDeleted0 = projectIdDeleted0.toString()
   const fileIdWithDifferentHashFound = objectIdFromTime('2017-02-01T00:00:00Z')
   const fileIdInGoodState = objectIdFromTime('2017-02-01T00:01:00Z')
+  const fileIdBlobExistsInGCS0 = objectIdFromTime('2017-02-01T00:02:00Z')
   const fileIdWithDifferentHashNotFound0 = objectIdFromTime(
     '2017-02-01T00:03:00Z'
   )
@@ -76,6 +112,9 @@ describe('back_fill_file_hash_fix_up script', function () {
   const fileIdWithDifferentHashRestore = objectIdFromTime(
     '2017-02-01T00:08:00Z'
   )
+  const fileIdBlobExistsInGCS1 = objectIdFromTime('2017-02-01T00:09:00Z')
+  const fileIdRestoreFromFilestore0 = objectIdFromTime('2017-02-01T00:10:00Z')
+  const fileIdRestoreFromFilestore1 = objectIdFromTime('2017-02-01T00:11:00Z')
   const fileIdMissing2 = objectIdFromTime('2017-02-01T00:12:00Z')
   const fileIdHashMissing0 = objectIdFromTime('2017-02-01T00:13:00Z')
   const fileIdHashMissing1 = objectIdFromTime('2017-02-01T00:14:00Z')
@@ -86,11 +125,31 @@ describe('back_fill_file_hash_fix_up script', function () {
   )
   const deleteProjectsRecordId0 = new ObjectId()
   const writtenBlobs = [
+    {
+      projectId: projectId0,
+      historyId: historyId0,
+      fileId: fileIdBlobExistsInGCS0,
+    },
+    {
+      projectId: projectId0,
+      historyId: historyId0,
+      fileId: fileIdBlobExistsInGCS1,
+    },
     {
       projectId: projectId0,
       historyId: historyId0,
       fileId: fileIdWithDifferentHashNotFound0,
     },
+    {
+      projectId: projectId0,
+      historyId: historyId0,
+      fileId: fileIdRestoreFromFilestore0,
+    },
+    {
+      projectId: projectId0,
+      historyId: historyId0,
+      fileId: fileIdRestoreFromFilestore1,
+    },
     {
       projectId: projectId0,
       historyId: historyId0,
|
|||
},
|
||||
msg: 'failed to process file',
|
||||
},
|
||||
{
|
||||
projectId: projectId0,
|
||||
fileId: fileIdRestoreFromFilestore0,
|
||||
err: { message: 'OError: hash mismatch' },
|
||||
hash: gitBlobHash(fileIdRestoreFromFilestore0),
|
||||
entry: {
|
||||
ctx: { historyId: historyId0.toString() },
|
||||
hash: hashDoesNotExistAsBlob,
|
||||
},
|
||||
msg: 'failed to process file',
|
||||
},
|
||||
{
|
||||
projectId: projectIdDeleted0,
|
||||
fileId: fileIdWithDifferentHashNotFound1,
|
||||
|
@ -166,6 +236,33 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
err: { message: 'NotFoundError' },
|
||||
msg: 'failed to process file',
|
||||
},
|
||||
{
|
||||
projectId: projectId0,
|
||||
fileId: fileIdBlobExistsInGCS0,
|
||||
hash: gitBlobHash(fileIdBlobExistsInGCS0),
|
||||
err: { message: 'storage.objects.delete' },
|
||||
msg: 'failed to process file',
|
||||
},
|
||||
{
|
||||
projectId: projectId0,
|
||||
fileId: fileIdBlobExistsInGCSCorrupted,
|
||||
hash: gitBlobHash(fileIdBlobExistsInGCSCorrupted),
|
||||
err: { message: 'storage.objects.delete' },
|
||||
msg: 'failed to process file',
|
||||
},
|
||||
{
|
||||
projectId: projectId0,
|
||||
fileId: fileIdBlobExistsInGCS1,
|
||||
hash: gitBlobHash(fileIdBlobExistsInGCS1),
|
||||
err: { message: 'storage.objects.delete' },
|
||||
msg: 'failed to process file',
|
||||
},
|
||||
{
|
||||
projectId: projectId0,
|
||||
fileId: fileIdRestoreFromFilestore1,
|
||||
err: { message: 'storage.objects.delete' },
|
||||
msg: 'failed to process file',
|
||||
},
|
||||
{
|
||||
projectId: projectIdDeleted0,
|
||||
fileId: fileIdMissing1,
|
||||
|
@ -194,23 +291,22 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
reason: 'bad file hash',
|
||||
msg: 'bad file-tree path',
|
||||
},
|
||||
{
|
||||
projectId: projectId0,
|
||||
_id: fileIdBlobExistsInGCSCorrupted,
|
||||
reason: 'bad file hash',
|
||||
msg: 'bad file-tree path',
|
||||
},
|
||||
]
|
||||
if (PRINT_IDS_AND_HASHES_FOR_DEBUGGING) {
|
||||
const fileIds = {
|
||||
fileIdWithDifferentHashFound,
|
||||
fileIdInGoodState,
|
||||
fileIdBlobExistsInGCS0,
|
||||
fileIdBlobExistsInGCS1,
|
||||
fileIdWithDifferentHashNotFound0,
|
||||
fileIdWithDifferentHashNotFound1,
|
||||
fileIdBlobExistsInGCSCorrupted,
|
||||
fileIdMissing0,
|
||||
fileIdMissing1,
|
||||
fileIdMissing2,
|
||||
fileIdWithDifferentHashRestore,
|
||||
fileIdRestoreFromFilestore0,
|
||||
fileIdRestoreFromFilestore1,
|
||||
fileIdHashMissing0,
|
||||
fileIdHashMissing1,
|
||||
}
|
||||
|
@ -234,25 +330,38 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
before(cleanup.everything)
|
||||
|
||||
before('populate blobs/GCS', async function () {
|
||||
await mockFilestore.start()
|
||||
mockFilestore.addFile(
|
||||
projectId0,
|
||||
fileIdHashMissing0,
|
||||
fileIdHashMissing0.toString()
|
||||
await FILESTORE_PERSISTOR.sendStream(
|
||||
USER_FILES_BUCKET_NAME,
|
||||
`${projectId0}/${fileIdRestoreFromFilestore0}`,
|
||||
Stream.Readable.from([fileIdRestoreFromFilestore0.toString()])
|
||||
)
|
||||
mockFilestore.addFile(
|
||||
projectId0,
|
||||
fileIdHashMissing1,
|
||||
fileIdHashMissing1.toString()
|
||||
await FILESTORE_PERSISTOR.sendStream(
|
||||
USER_FILES_BUCKET_NAME,
|
||||
`${projectId0}/${fileIdRestoreFromFilestore1}`,
|
||||
Stream.Readable.from([fileIdRestoreFromFilestore1.toString()])
|
||||
)
|
||||
mockFilestore.addFile(
|
||||
projectId0,
|
||||
fileIdBlobExistsInGCSCorrupted,
|
||||
fileIdBlobExistsInGCSCorrupted.toString()
|
||||
await FILESTORE_PERSISTOR.sendStream(
|
||||
USER_FILES_BUCKET_NAME,
|
||||
`${projectId0}/${fileIdHashMissing0}`,
|
||||
Stream.Readable.from([fileIdHashMissing0.toString()])
|
||||
)
|
||||
await FILESTORE_PERSISTOR.sendStream(
|
||||
USER_FILES_BUCKET_NAME,
|
||||
`${projectId0}/${fileIdHashMissing1}`,
|
||||
Stream.Readable.from([fileIdHashMissing1.toString()])
|
||||
)
|
||||
await new BlobStore(historyId0.toString()).putString(
|
||||
fileIdHashMissing1.toString() // partially processed
|
||||
)
|
||||
await new BlobStore(historyId0.toString()).putString(
|
||||
fileIdBlobExistsInGCS0.toString()
|
||||
)
|
||||
await new BlobStore(historyId0.toString()).putString(
|
||||
fileIdBlobExistsInGCS1.toString()
|
||||
)
|
||||
await new BlobStore(historyId0.toString()).putString(
|
||||
fileIdRestoreFromFilestore1.toString()
|
||||
)
|
||||
const path = '/tmp/test-blob-corrupted'
|
||||
try {
|
||||
await fs.promises.writeFile(path, contentCorruptedBlob)
|
||||
|
@ -317,10 +426,22 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
_id: fileIdWithDifferentHashNotFound0,
|
||||
hash: hashDoesNotExistAsBlob,
|
||||
},
|
||||
{
|
||||
_id: fileIdRestoreFromFilestore0,
|
||||
hash: hashDoesNotExistAsBlob,
|
||||
},
|
||||
{
|
||||
_id: fileIdRestoreFromFilestore1,
|
||||
},
|
||||
{
|
||||
_id: fileIdBlobExistsInGCS0,
|
||||
hash: gitBlobHash(fileIdBlobExistsInGCS0),
|
||||
},
|
||||
{
|
||||
_id: fileIdBlobExistsInGCSCorrupted,
|
||||
hash: gitBlobHash(fileIdBlobExistsInGCSCorrupted),
|
||||
},
|
||||
{ _id: fileIdBlobExistsInGCS1 },
|
||||
],
|
||||
folders: [],
|
||||
},
|
||||
|
@ -425,8 +546,8 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
})
|
||||
it('should print stats', function () {
|
||||
expect(stats).to.contain({
|
||||
processedLines: 12,
|
||||
success: 7,
|
||||
processedLines: 16,
|
||||
success: 11,
|
||||
alreadyProcessed: 0,
|
||||
fileDeleted: 0,
|
||||
skipped: 0,
|
||||
|
@ -437,9 +558,9 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
it('should handle re-run on same logs', async function () {
|
||||
;({ stats } = await runScriptWithLogs())
|
||||
expect(stats).to.contain({
|
||||
processedLines: 12,
|
||||
processedLines: 16,
|
||||
success: 0,
|
||||
alreadyProcessed: 4,
|
||||
alreadyProcessed: 8,
|
||||
fileDeleted: 3,
|
||||
skipped: 0,
|
||||
failed: 3,
|
||||
|
@ -542,11 +663,31 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
_id: fileIdWithDifferentHashNotFound0,
|
||||
hash: gitBlobHash(fileIdWithDifferentHashNotFound0),
|
||||
},
|
||||
// Updated hash
|
||||
{
|
||||
_id: fileIdRestoreFromFilestore0,
|
||||
hash: gitBlobHash(fileIdRestoreFromFilestore0),
|
||||
},
|
||||
// Added hash
|
||||
{
|
||||
_id: fileIdRestoreFromFilestore1,
|
||||
hash: gitBlobHash(fileIdRestoreFromFilestore1),
|
||||
},
|
||||
// No change, blob created
|
||||
{
|
||||
_id: fileIdBlobExistsInGCS0,
|
||||
hash: gitBlobHash(fileIdBlobExistsInGCS0),
|
||||
},
|
||||
// No change, flagged
|
||||
{
|
||||
_id: fileIdBlobExistsInGCSCorrupted,
|
||||
hash: gitBlobHash(fileIdBlobExistsInGCSCorrupted),
|
||||
},
|
||||
// Added hash
|
||||
{
|
||||
_id: fileIdBlobExistsInGCS1,
|
||||
hash: gitBlobHash(fileIdBlobExistsInGCS1),
|
||||
},
|
||||
],
|
||||
folders: [],
|
||||
},
|
||||
|
@ -555,7 +696,7 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
],
|
||||
overleaf: { history: { id: historyId0 } },
|
||||
// Incremented when removing file/updating hash
|
||||
version: 5,
|
||||
version: 8,
|
||||
},
|
||||
])
|
||||
expect(await deletedProjectsCollection.find({}).toArray()).to.deep.equal([
|
||||
|
@@ -604,6 +745,62 @@ describe('back_fill_file_hash_fix_up script', function () {
         (writtenBlobsByProject.get(projectId) || []).concat([fileId])
       )
     }
+    expect(
+      (await backedUpBlobs.find({}, { sort: { _id: 1 } }).toArray()).map(
+        entry => {
+          // blobs are pushed unordered into mongo. Sort the list for consistency.
+          entry.blobs.sort()
+          return entry
+        }
+      )
+    ).to.deep.equal(
+      Array.from(writtenBlobsByProject.entries()).map(
+        ([projectId, fileIds]) => {
+          return {
+            _id: projectId,
+            blobs: fileIds
+              .map(fileId => binaryForGitBlobHash(gitBlobHash(fileId)))
+              .sort(),
+          }
+        }
+      )
+    )
+  })
+  it('should have backed up all the files', async function () {
+    expect(tieringStorageClass).to.exist
+    const objects = await listS3Bucket(projectBlobsBucket, tieringStorageClass)
+    expect(objects.sort()).to.deep.equal(
+      writtenBlobs
+        .map(({ historyId, fileId, hash }) =>
+          makeProjectKey(historyId, hash || gitBlobHash(fileId))
+        )
+        .sort()
+    )
+    for (let { historyId, fileId } of writtenBlobs) {
+      const hash = gitBlobHash(fileId.toString())
+      const s = await backupPersistor.getObjectStream(
+        projectBlobsBucket,
+        makeProjectKey(historyId, hash),
+        { autoGunzip: true }
+      )
+      const buf = new WritableBuffer()
+      await Stream.promises.pipeline(s, buf)
+      expect(gitBlobHashBuffer(buf.getContents())).to.equal(hash)
+      const id = buf.getContents().toString('utf-8')
+      expect(id).to.equal(fileId.toString())
+      // double check we are not comparing 'undefined' or '[object Object]' above
+      expect(id).to.match(/^[a-f0-9]{24}$/)
+    }
+    const deks = await listS3Bucket(deksBucket, 'STANDARD')
+    expect(deks.sort()).to.deep.equal(
+      Array.from(
+        new Set(
+          writtenBlobs.map(
+            ({ historyId }) => projectKey.format(historyId) + '/dek'
+          )
+        )
+      ).sort()
+    )
+  })
   it('should have written the back filled files to history v1', async function () {
     for (const { historyId, fileId } of writtenBlobs) {
@@ -1,54 +0,0 @@
-import express from 'express'
-
-class MockFilestore {
-  constructor() {
-    this.host = process.env.FILESTORE_HOST || '127.0.0.1'
-    this.port = process.env.FILESTORE_PORT || 3009
-    // create a server listening on this.host and this.port
-    this.files = {}
-
-    this.app = express()
-
-    this.app.get('/project/:projectId/file/:fileId', (req, res) => {
-      const { projectId, fileId } = req.params
-      const content = this.files[projectId]?.[fileId]
-      if (!content) return res.status(404).end()
-      res.status(200).end(content)
-    })
-  }
-
-  start() {
-    // reset stored files
-    this.files = {}
-    // start the server
-    if (this.serverPromise) {
-      return this.serverPromise
-    } else {
-      this.serverPromise = new Promise((resolve, reject) => {
-        this.server = this.app.listen(this.port, this.host, err => {
-          if (err) return reject(err)
-          resolve()
-        })
-      })
-      return this.serverPromise
-    }
-  }
-
-  addFile(projectId, fileId, fileContent) {
-    if (!this.files[projectId]) {
-      this.files[projectId] = {}
-    }
-    this.files[projectId][fileId] = fileContent
-  }
-
-  deleteObject(projectId, fileId) {
-    if (this.files[projectId]) {
-      delete this.files[projectId][fileId]
-      if (Object.keys(this.files[projectId]).length === 0) {
-        delete this.files[projectId]
-      }
-    }
-  }
-}
-
-export const mockFilestore = new MockFilestore()
@@ -42,7 +42,7 @@ services:
     command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
     user: root
   mongo:
-    image: mongo:8.0.11
+    image: mongo:7.0.20
     command: --replSet overleaf
     volumes:
       - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

@@ -44,7 +44,7 @@ services:
     command: npm run --silent test:acceptance

   mongo:
-    image: mongo:8.0.11
+    image: mongo:7.0.20
     command: --replSet overleaf
     volumes:
       - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

@@ -55,7 +55,7 @@ services:
       retries: 20

   mongo:
-    image: mongo:8.0.11
+    image: mongo:7.0.20
     command: --replSet overleaf
     volumes:
       - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js

@@ -57,7 +57,7 @@ services:
       retries: 20

   mongo:
-    image: mongo:8.0.11
+    image: mongo:7.0.20
     command: --replSet overleaf
     volumes:
       - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
@@ -56,8 +56,14 @@ if (Settings.catchErrors) {
 // Create ./data/dumpFolder if needed
 FileWriter.ensureDumpFolderExists()

-// Validate combination of feature flags.
-Features.validateSettings()
+if (
+  !Features.hasFeature('project-history-blobs') &&
+  !Features.hasFeature('filestore')
+) {
+  throw new Error(
+    'invalid config: must enable either project-history-blobs (Settings.enableProjectHistoryBlobs=true) or enable filestore (Settings.disableFilestore=false)'
+  )
+}

 // handle SIGTERM for graceful shutdown in kubernetes
 process.on('SIGTERM', function (signal) {
@@ -36,22 +36,7 @@ function send401WithChallenge(res) {
 function checkCredentials(userDetailsMap, user, password) {
   const expectedPassword = userDetailsMap.get(user)
   const userExists = userDetailsMap.has(user) && expectedPassword // user exists with a non-null password
-
-  let isValid = false
-  if (userExists) {
-    if (Array.isArray(expectedPassword)) {
-      const isValidPrimary = Boolean(
-        expectedPassword[0] && tsscmp(expectedPassword[0], password)
-      )
-      const isValidFallback = Boolean(
-        expectedPassword[1] && tsscmp(expectedPassword[1], password)
-      )
-      isValid = isValidPrimary || isValidFallback
-    } else {
-      isValid = tsscmp(expectedPassword, password)
-    }
-  }
-
+  const isValid = userExists && tsscmp(expectedPassword, password)
   if (!isValid) {
     logger.err({ user }, 'invalid login details')
   }
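The removed branch supported per-user password values given as a [primary, fallback] pair (useful while rotating credentials); the restored code accepts only a single string. A sketch of the dropped capability, assuming tsscmp's timing-safe string compare (the helper name is hypothetical):

const tsscmp = require('tsscmp')

// Hypothetical helper mirroring the removed branch: accept either password of
// a [primary, fallback] pair, or compare a plain string directly.
function matches(expected, password) {
  if (Array.isArray(expected)) {
    return expected.some(p => Boolean(p && tsscmp(p, password)))
  }
  return tsscmp(expected, password)
}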
@@ -7,7 +7,6 @@ import logger from '@overleaf/logger'
 import _ from 'lodash'
 import { plainTextResponse } from '../../infrastructure/Response.js'
 import { expressify } from '@overleaf/promise-utils'
-import Modules from '../../infrastructure/Modules.js'

 async function getDocument(req, res) {
   const { Project_id: projectId, doc_id: docId } = req.params

@@ -93,9 +92,6 @@ async function setDocument(req, res) {
     { docId, projectId },
     'finished receiving set document request from api (docupdater)'
   )
-
-  await Modules.promises.hooks.fire('docModified', projectId, docId)
-
   res.json(result)
 }
@@ -8,7 +8,7 @@ function projectHistoryURLWithFilestoreFallback(
 ) {
   const filestoreURL = `${Settings.apis.filestore.url}/project/${projectId}/file/${fileRef._id}?from=${origin}`
   // TODO: When this file is converted to ES modules we will be able to use Features.hasFeature('project-history-blobs'). Currently we can't stub the feature return value in tests.
-  if (fileRef.hash && Settings.filestoreMigrationLevel >= 1) {
+  if (fileRef.hash && Settings.enableProjectHistoryBlobs) {
     return {
       url: `${Settings.apis.project_history.url}/project/${historyId}/blob/${fileRef.hash}`,
       fallbackURL: filestoreURL,
@@ -66,7 +66,7 @@ function uploadProject(req, res, next) {
 async function uploadFile(req, res, next) {
   const timer = new metrics.Timer('file-upload')
   const name = req.body.name
-  const { path } = req.file
+  const path = req.file?.path
   const projectId = req.params.Project_id
   const userId = SessionManager.getLoggedInUserId(req.session)
   let { folder_id: folderId } = req.query
@@ -162,14 +162,8 @@ function multerMiddleware(req, res, next) {
       .status(422)
       .json({ success: false, error: req.i18n.translate('file_too_large') })
     }
-    if (err) return next(err)
-    if (!req.file?.path) {
-      logger.info({ req }, 'missing req.file.path on upload')
-      return res
-        .status(400)
-        .json({ success: false, error: 'invalid_upload_request' })
-    }
-    next()
+
+    return next(err)
   })
 }
@@ -19,7 +19,8 @@ const trackChangesModuleAvailable =
 * @property {boolean | undefined} enableGithubSync
 * @property {boolean | undefined} enableGitBridge
 * @property {boolean | undefined} enableHomepage
- * @property {number} filestoreMigrationLevel
+ * @property {boolean | undefined} enableProjectHistoryBlobs
+ * @property {boolean | undefined} disableFilestore
 * @property {boolean | undefined} enableSaml
 * @property {boolean | undefined} ldap
 * @property {boolean | undefined} oauth
@@ -29,14 +30,6 @@ const trackChangesModuleAvailable =
 */

 const Features = {
-  validateSettings() {
-    if (![0, 1, 2].includes(Settings.filestoreMigrationLevel)) {
-      throw new Error(
-        `invalid OVERLEAF_FILESTORE_MIGRATION_LEVEL=${Settings.filestoreMigrationLevel}, expected 0, 1 or 2`
-      )
-    }
-  },
-
   /**
    * @returns {boolean}
    */
@@ -96,9 +89,9 @@ const Features = {
         Settings.enabledLinkedFileTypes.includes('url')
       )
     case 'project-history-blobs':
-      return Settings.filestoreMigrationLevel > 0
+      return Boolean(Settings.enableProjectHistoryBlobs)
     case 'filestore':
-      return Settings.filestoreMigrationLevel < 2
+      return Boolean(Settings.disableFilestore) === false
     case 'support':
       return supportModuleAvailable
     case 'symbol-palette':
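Read together with the validateSettings removal above, this swaps one three-valued integer for two independent booleans. An illustrative mapping derived from the two return statements (not part of the diff itself):

// OVERLEAF_FILESTORE_MIGRATION_LEVEL  ->  project-history-blobs / filestore
//   0  ->  false / true
//   1  ->  true  / true
//   2  ->  true  / false
// The restored settings express the same pair independently:
const hasProjectHistoryBlobs = Boolean(Settings.enableProjectHistoryBlobs)
const hasFilestore = Boolean(Settings.disableFilestore) === false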
@@ -150,7 +150,8 @@ async function linkedFileAgentsIncludes() {
 async function attachHooks() {
   for (const module of await modules()) {
     const { promises, ...hooks } = module.hooks || {}
-    for (const [hook, method] of Object.entries(promises || {})) {
+    for (const hook in promises || {}) {
+      const method = promises[hook]
       attachHook(hook, method)
     }
     for (const hook in hooks || {}) {
@@ -1,13 +1,13 @@
-section.cookie-banner.hidden-print.hidden(aria-label=translate('cookie_banner'))
-  .cookie-banner-content !{translate('cookie_banner_info', {}, [{ name: 'a', attrs: { href: '/legal#Cookies' }}])}
+section.cookie-banner.hidden-print.hidden(aria-label='Cookie banner')
+  .cookie-banner-content We only use cookies for essential purposes and to improve your experience on our site. You can find out more in our <a href="/legal#Cookies">cookie policy</a>.
   .cookie-banner-actions
     button(
       type='button'
       class='btn btn-link btn-sm'
       data-ol-cookie-banner-set-consent='essential'
-    ) #{translate('essential_cookies_only')}
+    ) Essential cookies only
     button(
       type='button'
       class='btn btn-primary btn-sm'
       data-ol-cookie-banner-set-consent='all'
-    ) #{translate('accept_all_cookies')}
+    ) Accept all cookies
@@ -4,7 +4,7 @@ block vars
   - var suppressNavbar = true
   - var suppressFooter = true
   - var suppressSkipToContent = true
-  - var suppressPugCookieBanner = true
+  - var suppressCookieBanner = true

 block content
   .content.content-alt
@@ -24,7 +24,7 @@ block body
   else
     include layout/fat-footer

-  if typeof suppressPugCookieBanner == 'undefined'
+  if typeof suppressCookieBanner == 'undefined'
     include _cookie_banner

   if bootstrapVersion === 5
@@ -69,5 +69,5 @@ block body
   else
     include layout/fat-footer-react-bootstrap-5

-  if typeof suppressPugCookieBanner === 'undefined'
+  if typeof suppressCookieBanner === 'undefined'
     include _cookie_banner
@@ -27,7 +27,7 @@ block body
   else
     include layout/fat-footer-website-redesign

-  if typeof suppressPugCookieBanner == 'undefined'
+  if typeof suppressCookieBanner == 'undefined'
     include _cookie_banner

 block contactModal
@@ -2,7 +2,7 @@ extends ../../layout-marketing

 block vars
   - var suppressFooter = true
-  - var suppressPugCookieBanner = true
+  - var suppressCookieBanner = true
   - var suppressSkipToContent = true

 block content
@@ -7,7 +7,7 @@ block vars
   - var suppressNavbar = true
   - var suppressFooter = true
   - var suppressSkipToContent = true
-  - var suppressPugCookieBanner = true
+  - var suppressCookieBanner = true
   - metadata.robotsNoindexNofollow = true

 block content
@@ -7,7 +7,6 @@ block vars
   - const suppressNavContentLinks = true
   - const suppressNavbar = true
   - const suppressFooter = true
-  - const suppressPugCookieBanner = true

 block append meta
   meta(
@@ -5,7 +5,7 @@ block entrypointVar

 block vars
   - var suppressFooter = true
-  - var suppressPugCookieBanner = true
+  - var suppressCookieBanner = true
   - var suppressSkipToContent = true

 block append meta
@@ -5,7 +5,7 @@ block entrypointVar

 block vars
   - var suppressFooter = true
-  - var suppressPugCookieBanner = true
+  - var suppressCookieBanner = true
   - var suppressSkipToContent = true

 block append meta
@@ -440,9 +440,6 @@ module.exports = {
       ','
     ),

-  filestoreMigrationLevel:
-    parseInt(process.env.OVERLEAF_FILESTORE_MIGRATION_LEVEL, 10) || 0,
-
   // i18n
   // ------
   //
@@ -95,7 +95,7 @@ services:
     image: redis:7.4.3

   mongo:
-    image: mongo:8.0.11
+    image: mongo:7.0.20
     logging:
       driver: none
     command: --replSet overleaf
@@ -91,7 +91,7 @@ services:
     image: redis:7.4.3

   mongo:
-    image: mongo:8.0.11
+    image: mongo:7.0.20
     command: --replSet overleaf
     volumes:
       - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
@@ -36,7 +36,6 @@
   "about_to_remove_user_preamble": "",
   "about_to_trash_projects": "",
   "abstract": "",
-  "accept_all_cookies": "",
   "accept_and_continue": "",
   "accept_change": "",
   "accept_change_error_description": "",

@@ -338,8 +337,6 @@
   "continue_to": "",
   "continue_using_free_features": "",
   "continue_with_free_plan": "",
-  "cookie_banner": "",
-  "cookie_banner_info": "",
   "copied": "",
   "copy": "",
   "copy_code": "",

@@ -555,7 +552,6 @@
   "error_opening_document_detail": "",
   "error_performing_request": "",
   "error_processing_file": "",
-  "essential_cookies_only": "",
   "example_project": "",
   "existing_plan_active_until_term_end": "",
   "expand": "",

@@ -875,7 +871,6 @@
   "invalid_password_too_similar": "",
   "invalid_regular_expression": "",
   "invalid_request": "",
-  "invalid_upload_request": "",
   "invite": "",
   "invite_expired": "",
   "invite_more_collabs": "",
services/web/frontend/js/features/cookie-banner/index.js — 53 lines (new file)
@@ -0,0 +1,53 @@
import getMeta from '@/utils/meta'

function loadGA() {
  if (window.olLoadGA) {
    window.olLoadGA()
  }
}

function setConsent(value) {
  document.querySelector('.cookie-banner').classList.add('hidden')
  const cookieDomain = getMeta('ol-ExposedSettings').cookieDomain
  const oneYearInSeconds = 60 * 60 * 24 * 365
  const cookieAttributes =
    '; path=/' +
    '; domain=' +
    cookieDomain +
    '; max-age=' +
    oneYearInSeconds +
    '; SameSite=Lax; Secure'
  if (value === 'all') {
    document.cookie = 'oa=1' + cookieAttributes
    loadGA()
    window.dispatchEvent(new CustomEvent('cookie-consent', { detail: true }))
  } else {
    document.cookie = 'oa=0' + cookieAttributes
    window.dispatchEvent(new CustomEvent('cookie-consent', { detail: false }))
  }
}

if (
  getMeta('ol-ExposedSettings').gaToken ||
  getMeta('ol-ExposedSettings').gaTokenV4 ||
  getMeta('ol-ExposedSettings').propensityId ||
  getMeta('ol-ExposedSettings').hotjarId
) {
  document
    .querySelectorAll('[data-ol-cookie-banner-set-consent]')
    .forEach(el => {
      el.addEventListener('click', function (e) {
        e.preventDefault()
        const consentType = el.getAttribute('data-ol-cookie-banner-set-consent')
        setConsent(consentType)
      })
    })

  const oaCookie = document.cookie.split('; ').find(c => c.startsWith('oa='))
  if (!oaCookie) {
    const cookieBannerEl = document.querySelector('.cookie-banner')
    if (cookieBannerEl) {
      cookieBannerEl.classList.remove('hidden')
    }
  }
}
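setConsent() above broadcasts the user's choice as a 'cookie-consent' CustomEvent, so unrelated scripts can react without importing this module. A sketch of such a consumer (assumed usage, not part of this compare):

// Sketch: any script on the page can listen for the event dispatched above.
window.addEventListener('cookie-consent', e => {
  const consented = (e as CustomEvent<boolean>).detail
  if (consented) {
    // Optional tracking could start here; for GA the page exposes
    // window.olLoadGA, which setConsent() already calls via loadGA().
    console.log('user accepted all cookies')
  }
})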
@@ -1,32 +0,0 @@
import {
  CookieConsentValue,
  cookieBannerRequired,
  hasMadeCookieChoice,
  setConsent,
} from '@/features/cookie-banner/utils'

function toggleCookieBanner(hidden: boolean) {
  const cookieBannerEl = document.querySelector('.cookie-banner')
  if (cookieBannerEl) {
    cookieBannerEl.classList.toggle('hidden', hidden)
  }
}

if (cookieBannerRequired()) {
  document
    .querySelectorAll('[data-ol-cookie-banner-set-consent]')
    .forEach(el => {
      el.addEventListener('click', function (e) {
        e.preventDefault()
        toggleCookieBanner(true)
        const consentType = el.getAttribute(
          'data-ol-cookie-banner-set-consent'
        ) as CookieConsentValue | null
        setConsent(consentType)
      })
    })

  if (!hasMadeCookieChoice()) {
    toggleCookieBanner(false)
  }
}
@@ -1,43 +0,0 @@
import getMeta from '@/utils/meta'

export type CookieConsentValue = 'all' | 'essential'

function loadGA() {
  if (window.olLoadGA) {
    window.olLoadGA()
  }
}

export function setConsent(value: CookieConsentValue | null) {
  const cookieDomain = getMeta('ol-ExposedSettings').cookieDomain
  const oneYearInSeconds = 60 * 60 * 24 * 365
  const cookieAttributes =
    '; path=/' +
    '; domain=' +
    cookieDomain +
    '; max-age=' +
    oneYearInSeconds +
    '; SameSite=Lax; Secure'
  if (value === 'all') {
    document.cookie = 'oa=1' + cookieAttributes
    loadGA()
    window.dispatchEvent(new CustomEvent('cookie-consent', { detail: true }))
  } else {
    document.cookie = 'oa=0' + cookieAttributes
    window.dispatchEvent(new CustomEvent('cookie-consent', { detail: false }))
  }
}

export function cookieBannerRequired() {
  const exposedSettings = getMeta('ol-ExposedSettings')
  return Boolean(
    exposedSettings.gaToken ||
      exposedSettings.gaTokenV4 ||
      exposedSettings.propensityId ||
      exposedSettings.hotjarId
  )
}

export function hasMadeCookieChoice() {
  return document.cookie.split('; ').some(c => c.startsWith('oa='))
}
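Both the removed TS helper and the restored plain-JS module serialize the same consent cookie. For illustration, assuming a cookieDomain of 'example.com' (hypothetical value), accepting all cookies writes:

// Resulting document.cookie assignment from setConsent('all'):
// 'oa=1; path=/; domain=example.com; max-age=31536000; SameSite=Lax; Secure'
const oneYearInSeconds = 60 * 60 * 24 * 365 // 31536000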
@@ -1,4 +1,4 @@
import { useTranslation, Trans } from 'react-i18next'
import { useTranslation } from 'react-i18next'
import { FetchError } from '../../../../infrastructure/fetch-json'
import RedirectToLogin from './redirect-to-login'
import {
@@ -7,7 +7,6 @@ import {
  InvalidFilenameError,
} from '../../errors'
import DangerMessage from './danger-message'
import getMeta from '@/utils/meta'

// TODO: Update the error type when we properly type FileTreeActionableContext
export default function ErrorMessage({
@@ -16,7 +15,6 @@ export default function ErrorMessage({
  error: string | Record<string, any>
}) {
  const { t } = useTranslation()
  const { isOverleaf } = getMeta('ol-ExposedSettings')
  const fileNameLimit = 150

  // the error is a string
@@ -48,22 +46,6 @@ export default function ErrorMessage({
        </DangerMessage>
      )

    case 'invalid_upload_request':
      if (!isOverleaf) {
        return (
          <DangerMessage>{t('generic_something_went_wrong')}</DangerMessage>
        )
      }
      return (
        <DangerMessage>
          <Trans
            i18nKey="invalid_upload_request"
            // eslint-disable-next-line jsx-a11y/anchor-has-content, react/jsx-key
            components={[<a href="/contact" target="_blank" />]}
          />
        </DangerMessage>
      )

    case 'duplicate_file_name':
      return (
        <DangerMessage>
@@ -1,14 +1,15 @@
import { MessageProps } from '@/features/chat/components/message'
import { User } from '../../../../../../types/user'
import {
  getBackgroundColorForUserId,
  hslStringToLuminance,
} from '@/shared/utils/colors'
import { getHueForUserId } from '@/shared/utils/colors'
import MessageContent from '@/features/chat/components/message-content'
import classNames from 'classnames'
import MaterialIcon from '@/shared/components/material-icon'
import { t } from 'i18next'

function hue(user?: User) {
  return user ? getHueForUserId(user.id) : 0
}

function getAvatarStyle(user?: User) {
  if (!user?.id) {
    // Deleted user
@@ -19,15 +20,9 @@ function getAvatarStyle(user?: User) {
    }
  }

  const backgroundColor = getBackgroundColorForUserId(user.id)

  return {
    borderColor: backgroundColor,
    backgroundColor,
    color:
      hslStringToLuminance(backgroundColor) < 0.5
        ? 'var(--content-primary-dark)'
        : 'var(--content-primary)',
    borderColor: `hsl(${hue(user)}, 85%, 40%)`,
    backgroundColor: `hsl(${hue(user)}, 85%, 40%`,
  }
}
@@ -7,11 +7,7 @@ import {
  DropdownToggle,
} from '@/features/ui/components/bootstrap-5/dropdown-menu'
import OLTooltip from '@/features/ui/components/ol/ol-tooltip'
import {
  getBackgroundColorForUserId,
  hslStringToLuminance,
} from '@/shared/utils/colors'
import classNames from 'classnames'
import { getBackgroundColorForUserId } from '@/shared/utils/colors'
import { useCallback, useMemo } from 'react'
import { useTranslation } from 'react-i18next'

@@ -90,16 +86,9 @@ const OnlineUserWidget = ({

const OnlineUserCircle = ({ user }: { user: OnlineUser }) => {
  const backgroundColor = getBackgroundColorForUserId(user.user_id)
  const luminance = hslStringToLuminance(backgroundColor)
  const [character] = [...user.name]
  return (
    <span
      className={classNames('online-user-circle', {
        'online-user-circle-light-font': luminance < 0.5,
        'online-user-circle-dark-font': luminance >= 0.5,
      })}
      style={{ backgroundColor }}
    >
    <span className="online-user-circle" style={{ backgroundColor }}>
      {character}
    </span>
  )
@@ -20,7 +20,6 @@ import Footer from '@/features/ui/components/bootstrap-5/footer/footer'
import SidebarDsNav from '@/features/project-list/components/sidebar/sidebar-ds-nav'
import SystemMessages from '@/shared/components/system-messages'
import overleafLogo from '@/shared/svgs/overleaf-a-ds-solution-mallard.svg'
import CookieBanner from '@/shared/components/cookie-banner'

export function ProjectListDsNav() {
  const navbarProps = getMeta('ol-navbar')
@@ -126,7 +125,6 @@ export function ProjectListDsNav() {
          </div>
          <Footer {...footerProps} />
        </div>
        <CookieBanner />
      </div>
    </main>
  </div>
@@ -18,7 +18,6 @@ import Footer from '@/features/ui/components/bootstrap-5/footer/footer'
import WelcomePageContent from '@/features/project-list/components/welcome-page-content'
import { ProjectListDsNav } from '@/features/project-list/components/project-list-ds-nav'
import { DsNavStyleProvider } from '@/features/project-list/components/use-is-ds-nav'
import CookieBanner from '@/shared/components/cookie-banner'

function ProjectListRoot() {
  const { isReady } = useWaitForI18n()
@@ -89,12 +88,9 @@ function ProjectListPageContent() {

  if (totalProjectsCount === 0) {
    return (
      <>
        <DefaultPageContentWrapper>
          <WelcomePageContent />
        </DefaultPageContentWrapper>
        <CookieBanner />
      </>
      <DefaultPageContentWrapper>
        <WelcomePageContent />
      </DefaultPageContentWrapper>
    )
  }
  return (
@@ -16,9 +16,12 @@ const EditorManageTemplateModalWrapper = React.memo(
    handleHide,
    openTemplate,
  }: EditorManageTemplateModalWrapperProps) {
    const { project } = useProjectContext()
    const {
      _id: projectId,
      name: projectName,
    } = useProjectContext()

    if (!project) {
    if (!projectName) {
      // wait for useProjectContext
      return null
    }
@@ -27,8 +30,8 @@ const EditorManageTemplateModalWrapper = React.memo(
        handleHide={handleHide}
        show={show}
        handleAfterPublished={openTemplate}
        projectId={project._id}
        projectName={project.name}
        projectId={projectId}
        projectName={projectName}
      />
    )
  }
@@ -1,58 +0,0 @@
import OLButton from '@/features/ui/components/ol/ol-button'
import { Trans, useTranslation } from 'react-i18next'
import React, { useState } from 'react'
import {
  CookieConsentValue,
  cookieBannerRequired,
  hasMadeCookieChoice,
  setConsent,
} from '@/features/cookie-banner/utils'

function CookieBanner() {
  const { t } = useTranslation()
  const [hidden, setHidden] = useState(
    () => !cookieBannerRequired() || hasMadeCookieChoice()
  )

  function makeCookieChoice(value: CookieConsentValue) {
    setConsent(value)
    setHidden(true)
  }

  if (hidden) {
    return null
  }

  return (
    <section
      className="cookie-banner hidden-print"
      aria-label={t('cookie_banner')}
    >
      <div className="cookie-banner-content">
        <Trans
          i18nKey="cookie_banner_info"
          // eslint-disable-next-line react/jsx-key, jsx-a11y/anchor-has-content
          components={[<a href="/legal#Cookies" />]}
        />
      </div>
      <div className="cookie-banner-actions">
        <OLButton
          variant="link"
          size="sm"
          onClick={() => makeCookieChoice('essential')}
        >
          {t('essential_cookies_only')}
        </OLButton>
        <OLButton
          variant="primary"
          size="sm"
          onClick={() => makeCookieChoice('all')}
        >
          {t('accept_all_cookies')}
        </OLButton>
      </div>
    </section>
  )
}

export default CookieBanner
@@ -34,51 +34,6 @@ export function getBackgroundColorForUserId(userId?: string) {
  return `hsl(${getHueForUserId(userId)}, 70%, 50%)`
}

export function hslStringToLuminance(hslString: string): number {
  // First extract the individual components from the HSL string
  const hslSplit = hslString.slice(4).split(')')[0].split(',')

  const h = Number(hslSplit[0])
  const s = Number(hslSplit[1].slice(0, -1)) / 100
  const l = Number(hslSplit[2].slice(0, -1)) / 100

  // Then we need to convert HSL to RGB
  const c = (1 - Math.abs(2 * l - 1)) * s
  const x = c * (1 - Math.abs(((h / 60) % 2) - 1))
  const m = l - c / 2
  let r = 0
  let g = 0
  let b = 0
  if (h >= 0 && h < 60) {
    r = c + m
    g = x + m
    b = m
  } else if (h >= 60 && h < 120) {
    r = x + m
    g = c + m
    b = m
  } else if (h >= 120 && h < 180) {
    r = m
    g = c + m
    b = x + m
  } else if (h >= 180 && h < 240) {
    r = m
    g = x + m
    b = c + m
  } else if (h >= 240 && h < 300) {
    r = x + m
    g = m
    b = c + m
  } else if (h >= 300 && h < 360) {
    r = c + m
    g = m
    b = x + m
  }

  // Finally we calculate the luminance
  return 0.2126 * r + 0.7152 * g + 0.0722 * b
}

const cachedHues = new Map()

export function getHueForId(id: string) {
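A worked example of the removed helper, which is what drove the light/dark font choice in OnlineUserCircle above (values checked by hand against the formula in the deleted function):

// hsl(120, 70%, 50%), a user color from getBackgroundColorForUserId:
// c = (1 − |2·0.5 − 1|)·0.7 = 0.7, x = 0, m = 0.15
// 120 ≤ h < 180 → rgb ≈ (0.15, 0.85, 0.15)
// luminance = 0.2126·0.15 + 0.7152·0.85 + 0.0722·0.15 ≈ 0.65
const luminance = hslStringToLuminance('hsl(120, 70%, 50%)') // ≈ 0.65
const fontClass =
  luminance < 0.5
    ? 'online-user-circle-light-font'
    : 'online-user-circle-dark-font' // ≥ 0.5 → dark font on this green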
@@ -124,12 +124,4 @@
    box-sizing: border-box;
    display: inline-block;
  }

  .online-user-circle-light-font {
    color: var(--content-primary-dark);
  }

  .online-user-circle-dark-font {
    color: var(--content-primary);
  }
}
@@ -524,10 +524,6 @@ $z-index-group-member-picker-list: 1;
  &[data-ol-plans-new-group-member-picker-button='group-all'] {
    height: $group-member-picker-top-height;
  }

  .material-symbols {
    pointer-events: none;
  }
}

ul.plans-new-group-member-picker-list {
@@ -255,12 +255,6 @@
  display: flex;
  flex-direction: column;

  > * {
    @include media-breakpoint-up(md) {
      border-left: 1px solid var(--border-divider);
    }
  }

  .project-ds-nav-content {
    flex-grow: 1;
    overflow-y: auto;
@@ -269,20 +263,10 @@

    @include media-breakpoint-up(md) {
      border-top-left-radius: var(--border-radius-large);
      border-left: 1px solid var(--border-divider);
      border-top: 1px solid var(--border-divider);
    }
  }

  .cookie-banner {
    position: static;
    background-color: var(--bg-light-primary);

    // Remove the parts of the shadow that stick out of the sides
    clip-path: inset(-13px 0 0 0);

    // Prevent the cookie banner being overlaid on top of the navigation
    z-index: auto;
  }
}
@@ -39,7 +39,6 @@
  "about_to_trash_projects": "You are about to trash the following projects:",
  "abstract": "Abstract",
  "accept": "Accept",
  "accept_all_cookies": "Accept all cookies",
  "accept_and_continue": "Accept and continue",
  "accept_change": "Accept change",
  "accept_change_error_description": "There was an error accepting a track change. Please try again in a few moments.",
@@ -436,8 +435,6 @@
  "continue_using_free_features": "Continue using our free features",
  "continue_with_free_plan": "Continue with free plan",
  "continue_with_service": "Continue with __service__",
  "cookie_banner": "Cookie banner",
  "cookie_banner_info": "We only use cookies for essential purposes and to improve your experience on our site. You can find out more in our <0>cookie policy</0>.",
  "copied": "Copied",
  "copy": "Copy",
  "copy_code": "Copy code",
@@ -618,7 +615,6 @@
  "dropbox_synced": "Overleaf and Dropbox have processed all updates. Note that your local Dropbox might still be synchronizing",
  "dropbox_unlinked_because_access_denied": "Your Dropbox account has been unlinked because the Dropbox service rejected your stored credentials. Please relink your Dropbox account to continue using it with Overleaf.",
  "dropbox_unlinked_because_full": "Your Dropbox account has been unlinked because it is full, and we can no longer send updates to it. Please free up some space and relink your Dropbox account to continue using it with Overleaf.",
  "dropbox_unlinked_because_suspended": "We’ve unlinked your Dropbox account because it’s been suspended by Dropbox. You’ll be able to relink once you’ve resolved the issue with Dropbox.",
  "dropbox_unlinked_premium_feature": "<0>Your Dropbox account has been unlinked</0> because Dropbox Sync is a premium feature that you had through an institutional license.",
  "due_date": "Due __date__",
  "due_today": "Due today",
@@ -708,7 +704,6 @@
  "error_performing_request": "An error has occurred while performing your request.",
  "error_processing_file": "Sorry, something went wrong processing this file. Please try again.",
  "es": "Spanish",
  "essential_cookies_only": "Essential cookies only",
  "estimated_number_of_overleaf_users": "Estimated number of __appName__ users",
  "every": "per",
  "everything_in_free_plus": "Everything in Free, plus…",
@@ -1117,7 +1112,6 @@
  "invalid_password_too_similar": "Password is too similar to parts of email address",
  "invalid_regular_expression": "Invalid regular expression",
  "invalid_request": "Invalid Request. Please correct the data and try again.",
  "invalid_upload_request": "The upload failed. If the problem persists, <0>let us know</0>.",
  "invalid_zip_file": "Invalid zip file",
  "invite": "Invite",
  "invite_expired": "The invite may have expired",
@@ -8,6 +8,7 @@ import _ from 'lodash'
import ProjectGetter from '../../../../../app/src/Features/Project/ProjectGetter.js'
import User from '../../../../../test/acceptance/src/helpers/User.mjs'
import MockDocUpdaterApiClass from '../../../../../test/acceptance/src/mocks/MockDocUpdaterApi.mjs'
import Features from '../../../../../app/src/infrastructure/Features.js'

const { ObjectId } = mongodb

@@ -187,25 +188,32 @@ describe('ProjectStructureChanges', function () {
  const cases = [
    {
      label: 'with filestore disabled and project-history-blobs enabled',
      filestoreMigrationLevel: 2,
      disableFilestore: true,
      enableProjectHistoryBlobs: true,
    },
    {
      label: 'with filestore enabled and project-history-blobs enabled',
      filestoreMigrationLevel: 1,
      disableFilestore: false,
      enableProjectHistoryBlobs: true,
    },
    {
      label: 'with filestore enabled and project-history-blobs disabled',
      filestoreMigrationLevel: 0,
      disableFilestore: false,
      enableProjectHistoryBlobs: false,
    },
  ]
  for (const { label, filestoreMigrationLevel } of cases) {
  for (const { label, disableFilestore, enableProjectHistoryBlobs } of cases) {
    describe(label, function () {
      const previousFilestoreMigrationLevel = Settings.filestoreMigrationLevel
      const previousDisableFilestore = Settings.disableFilestore
      const previousEnableProjectHistoryBlobs =
        Settings.enableProjectHistoryBlobs
      beforeEach(function () {
        Settings.filestoreMigrationLevel = filestoreMigrationLevel
        Settings.disableFilestore = disableFilestore
        Settings.enableProjectHistoryBlobs = enableProjectHistoryBlobs
      })
      afterEach(function () {
        Settings.filestoreMigrationLevel = previousFilestoreMigrationLevel
        Settings.disableFilestore = previousDisableFilestore
        Settings.enableProjectHistoryBlobs = previousEnableProjectHistoryBlobs
      })

      describe('creating a project from the example template', function () {
@@ -236,7 +244,7 @@ describe('ProjectStructureChanges', function () {
        expect(updates[2].type).to.equal('add-file')
        expect(updates[2].userId).to.equal(owner._id)
        expect(updates[2].pathname).to.equal('/frog.jpg')
        if (filestoreMigrationLevel === 2) {
        if (disableFilestore) {
          expect(updates[2].url).to.not.exist
          expect(updates[2].createdBlob).to.be.true
        } else {
@@ -293,10 +301,10 @@ describe('ProjectStructureChanges', function () {
        expect(updates[2].type).to.equal('add-file')
        expect(updates[2].userId).to.equal(owner._id)
        expect(updates[2].pathname).to.equal('/frog.jpg')
        if (filestoreMigrationLevel === 2) {
        if (disableFilestore) {
          expect(updates[2].url).to.not.exist
          expect(updates[2].createdBlob).to.be.true
        } else if (filestoreMigrationLevel === 1) {
        } else if (Features.hasFeature('project-history-blobs')) {
          expect(updates[2].url).to.be.null
        } else {
          expect(updates[2].url).to.be.a('string')
@@ -370,7 +378,7 @@ describe('ProjectStructureChanges', function () {
        expect(updates[1].type).to.equal('add-file')
        expect(updates[1].userId).to.equal(owner._id)
        expect(updates[1].pathname).to.equal('/1pixel.png')
        if (filestoreMigrationLevel === 2) {
        if (disableFilestore) {
          expect(updates[1].url).to.not.exist
          expect(updates[1].createdBlob).to.be.true
        } else {
@@ -470,7 +478,7 @@ describe('ProjectStructureChanges', function () {
        expect(update.type).to.equal('add-file')
        expect(update.userId).to.equal(owner._id)
        expect(update.pathname).to.equal('/1pixel.png')
        if (filestoreMigrationLevel === 2) {
        if (disableFilestore) {
          expect(update.url).to.not.exist
          expect(update.createdBlob).to.be.true
        } else {
@@ -508,7 +516,7 @@ describe('ProjectStructureChanges', function () {
        expect(updates[1].type).to.equal('add-file')
        expect(updates[1].userId).to.equal(owner._id)
        expect(updates[1].pathname).to.equal('/1pixel.png')
        if (filestoreMigrationLevel === 2) {
        if (disableFilestore) {
          expect(updates[1].url).to.not.exist
          expect(updates[1].createdBlob).to.be.true
        } else {
@@ -997,7 +1005,7 @@ describe('ProjectStructureChanges', function () {
        expect(update.type).to.equal('add-file')
        expect(update.userId).to.equal(owner._id)
        expect(update.pathname).to.equal('/1pixel.png')
        if (filestoreMigrationLevel === 2) {
        if (disableFilestore) {
          expect(update.url).to.not.exist
          expect(update.createdBlob).to.be.true
        } else {
@@ -1060,7 +1068,7 @@ describe('ProjectStructureChanges', function () {
        expect(updates[1].type).to.equal('add-file')
        expect(updates[1].userId).to.equal(owner._id)
        expect(updates[1].pathname).to.equal('/1pixel.png')
        if (filestoreMigrationLevel === 2) {
        if (disableFilestore) {
          expect(updates[1].url).to.not.exist
          expect(updates[1].createdBlob).to.be.true
        } else {
@@ -74,7 +74,6 @@ async function main() {
  'dropbox_email_not_verified',
  'dropbox_unlinked_because_access_denied',
  'dropbox_unlinked_because_full',
  'dropbox_unlinked_because_suspended',

  // Actually used without the spurious space.
  // TODO: fix the space and upload the changed locales
@@ -69,7 +69,10 @@ describe('PrimaryEmailCheck', function () {
        $set: { lastPrimaryEmailCheck: new Date(time) },
      })

      await userHelper.confirmEmail(userHelper.user.email)
      await userHelper.confirmEmail(
        userHelper.user._id,
        userHelper.user.email
      )
    })

    it("shouldn't be redirected from project list to the primary email check page", async function () {
@@ -150,7 +153,10 @@ describe('PrimaryEmailCheck', function () {
        $set: { lastPrimaryEmailCheck: new Date(time) },
      })

      await userHelper.confirmEmail(userHelper.user.email)
      await userHelper.confirmEmail(
        userHelper.user._id,
        userHelper.user.email
      )
    })

    it("shouldn't be redirected from project list to the primary email check page", async function () {
@@ -213,8 +219,14 @@ describe('PrimaryEmailCheck', function () {
    })

    beforeEach(async function () {
      await userHelper.confirmEmail(userHelper.user.email)
      await userHelper.addEmailAndConfirm('secondary@overleaf.com')
      await userHelper.confirmEmail(
        userHelper.user._id,
        userHelper.user.email
      )
      await userHelper.addEmailAndConfirm(
        userHelper.user._id,
        'secondary@overleaf.com'
      )

      checkResponse = await userHelper.fetch(
        '/user/emails/primary-email-check',
@@ -138,47 +138,6 @@ describe('ProjectStructureChanges', function () {
    })
  })

  describe('when sending an upload request without a file', function () {
    describe('project', function () {
      it('should reject the request with status 400', async function () {
        const { response, body } = await owner.doRequest('POST', {
          uri: 'project/new/upload',
          json: true,
          formData: {
            name: 'foo',
          },
        })

        expect(response.statusCode).to.equal(400)
        expect(body).to.deep.equal({
          success: false,
          error: 'invalid_upload_request',
        })
      })
    })

    describe('file', function () {
      it('should reject the request with status 400', async function () {
        const projectId = await owner.createProject('foo', {
          template: 'blank',
        })
        const { response, body } = await owner.doRequest('POST', {
          uri: `project/${projectId}/upload`,
          json: true,
          formData: {
            name: 'foo.txt',
          },
        })

        expect(response.statusCode).to.equal(400)
        expect(body).to.deep.equal({
          success: false,
          error: 'invalid_upload_request',
        })
      })
    })
  })

  describe('uploading an empty zipfile', function () {
    let res

@@ -162,7 +162,7 @@ class UserHelper {

  /**
   *
   * @param {'pendingExistingEmail'|'pendingUserRegistration'|'pendingSecondaryEmail'}sessionKey
   * @param {'pendingExistingEmail'|'pendingUserRegistration'}sessionKey
   * @return {Promise<*>}
   */
  async getEmailConfirmationCode(sessionKey) {
@@ -431,16 +431,16 @@ class UserHelper {
  }

  async addEmail(email) {
    const response = await this.fetch('/user/emails/secondary', {
    const response = await this.fetch('/user/emails', {
      method: 'POST',
      body: new URLSearchParams([['email', email]]),
    })
    await throwIfErrorResponse(response)
  }

  async addEmailAndConfirm(email) {
  async addEmailAndConfirm(userId, email) {
    await this.addEmail(email)
    await this.confirmSecondaryEmail()
    await this.confirmEmail(userId, email)
  }

  async changeConfirmationDate(userId, email, date) {
@@ -499,9 +499,9 @@ class UserHelper {
    await this.changeConfirmationDate(userId, email, date)
  }

  async confirmEmail(email) {
  async confirmEmail(userId, email) {
    // clear ratelimiting on resend confirmation endpoint
    await rateLimiters.sendConfirmation.delete(this.user._id)
    await rateLimiters.sendConfirmation.delete(userId)
    const requestConfirmationCode = await this.fetch(
      '/user/emails/send-confirmation-code',
      {
@@ -517,25 +517,6 @@ class UserHelper {
    })
    await throwIfErrorResponse(requestConfirmCode)
  }

  async confirmSecondaryEmail() {
    const code = await this.getEmailConfirmationCode('pendingSecondaryEmail')
    const requestConfirmCode = await this.fetch(
      '/user/emails/confirm-secondary',
      {
        method: 'POST',
        body: new URLSearchParams({ code }),
      }
    )
    await throwIfErrorResponse(requestConfirmCode)
  }

  async unconfirmEmail(email) {
    await UserUpdater.promises.updateUser(
      { _id: this.user._id, 'emails.email': email.toLowerCase() },
      { $unset: { 'emails.$.confirmedAt': 1, 'emails.$.reconfirmedAt': 1 } }
    )
  }
}

export default UserHelper
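Usage sketch for the reverted helper API, matching the PrimaryEmailCheck call sites above (illustrative only; assumes an existing userHelper instance from the acceptance-test setup and an enclosing async function):

// Confirm the primary email, then add and confirm a secondary one.
await userHelper.confirmEmail(userHelper.user._id, userHelper.user.email)
await userHelper.addEmailAndConfirm(userHelper.user._id, 'secondary@overleaf.com')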
@@ -1500,331 +1500,4 @@ describe('AuthenticationController', function () {
      })
    })
  })

  describe('checkCredentials', function () {
    beforeEach(function () {
      this.userDetailsMap = new Map()
      this.logger.err = sinon.stub()
      this.Metrics.inc = sinon.stub()
    })

    describe('with valid credentials', function () {
      describe('single password', function () {
        beforeEach(function () {
          this.userDetailsMap.set('testuser', 'correctpassword')
          this.result = this.AuthenticationController.checkCredentials(
            this.userDetailsMap,
            'testuser',
            'correctpassword'
          )
        })

        it('should return true', function () {
          this.result.should.equal(true)
        })

        it('should not log an error', function () {
          this.logger.err.called.should.equal(false)
        })

        it('should record success metrics', function () {
          this.Metrics.inc.should.have.been.calledWith(
            'security.http-auth.check-credentials',
            1,
            {
              path: 'known-user',
              status: 'pass',
            }
          )
        })
      })

      describe('array with primary password', function () {
        beforeEach(function () {
          this.userDetailsMap.set('testuser', ['primary', 'fallback'])
          this.result = this.AuthenticationController.checkCredentials(
            this.userDetailsMap,
            'testuser',
            'primary'
          )
        })

        it('should return true', function () {
          this.result.should.equal(true)
        })

        it('should not log an error', function () {
          this.logger.err.called.should.equal(false)
        })

        it('should record success metrics', function () {
          this.Metrics.inc.should.have.been.calledWith(
            'security.http-auth.check-credentials',
            1,
            {
              path: 'known-user',
              status: 'pass',
            }
          )
        })
      })

      describe('array with fallback password', function () {
        beforeEach(function () {
          this.userDetailsMap.set('testuser', ['primary', 'fallback'])
          this.result = this.AuthenticationController.checkCredentials(
            this.userDetailsMap,
            'testuser',
            'fallback'
          )
        })

        it('should return true', function () {
          this.result.should.equal(true)
        })

        it('should not log an error', function () {
          this.logger.err.called.should.equal(false)
        })

        it('should record success metrics', function () {
          this.Metrics.inc.should.have.been.calledWith(
            'security.http-auth.check-credentials',
            1,
            {
              path: 'known-user',
              status: 'pass',
            }
          )
        })
      })
    })

    describe('with invalid credentials', function () {
      describe('unknown user', function () {
        beforeEach(function () {
          this.userDetailsMap.set('testuser', 'correctpassword')
          this.result = this.AuthenticationController.checkCredentials(
            this.userDetailsMap,
            'unknownuser',
            'anypassword'
          )
        })

        it('should return false', function () {
          this.result.should.equal(false)
        })

        it('should log an error', function () {
          this.logger.err.should.have.been.calledWith(
            { user: 'unknownuser' },
            'invalid login details'
          )
        })

        it('should record failure metrics', function () {
          this.Metrics.inc.should.have.been.calledWith(
            'security.http-auth.check-credentials',
            1,
            {
              path: 'unknown-user',
              status: 'fail',
            }
          )
        })
      })

      describe('wrong password', function () {
        beforeEach(function () {
          this.userDetailsMap.set('testuser', 'correctpassword')
          this.result = this.AuthenticationController.checkCredentials(
            this.userDetailsMap,
            'testuser',
            'wrongpassword'
          )
        })

        it('should return false', function () {
          this.result.should.equal(false)
        })

        it('should log an error', function () {
          this.logger.err.should.have.been.calledWith(
            { user: 'testuser' },
            'invalid login details'
          )
        })

        it('should record failure metrics', function () {
          this.Metrics.inc.should.have.been.calledWith(
            'security.http-auth.check-credentials',
            1,
            {
              path: 'known-user',
              status: 'fail',
            }
          )
        })
      })

      describe('wrong password with array', function () {
        beforeEach(function () {
          this.userDetailsMap.set('testuser', ['primary', 'fallback'])
          this.result = this.AuthenticationController.checkCredentials(
            this.userDetailsMap,
            'testuser',
            'wrongpassword'
          )
        })

        it('should return false', function () {
          this.result.should.equal(false)
        })

        it('should log an error', function () {
          this.logger.err.should.have.been.calledWith(
            { user: 'testuser' },
            'invalid login details'
          )
        })

        it('should record failure metrics', function () {
          this.Metrics.inc.should.have.been.calledWith(
            'security.http-auth.check-credentials',
            1,
            {
              path: 'known-user',
              status: 'fail',
            }
          )
        })
      })

      describe('null user entry', function () {
        beforeEach(function () {
          this.userDetailsMap.set('testuser', null)
          this.result = this.AuthenticationController.checkCredentials(
            this.userDetailsMap,
            'testuser',
            'anypassword'
          )
        })

        it('should return false', function () {
          this.result.should.equal(false)
        })

        it('should log an error', function () {
          this.logger.err.should.have.been.calledWith(
            { user: 'testuser' },
            'invalid login details'
          )
        })

        it('should record failure metrics for unknown user', function () {
          this.Metrics.inc.should.have.been.calledWith(
            'security.http-auth.check-credentials',
            1,
            {
              path: 'unknown-user',
              status: 'fail',
            }
          )
        })
      })

      describe('empty primary password in array', function () {
        beforeEach(function () {
          this.userDetailsMap.set('testuser', ['', 'fallback'])
          this.result = this.AuthenticationController.checkCredentials(
            this.userDetailsMap,
            'testuser',
            'fallback'
          )
        })

        it('should return true with fallback password', function () {
          this.result.should.equal(true)
        })

        it('should not log an error', function () {
          this.logger.err.called.should.equal(false)
        })
      })

      describe('empty fallback password in array', function () {
        beforeEach(function () {
          this.userDetailsMap.set('testuser', ['primary', ''])
          this.result = this.AuthenticationController.checkCredentials(
            this.userDetailsMap,
            'testuser',
            'primary'
          )
        })

        it('should return true with primary password', function () {
          this.result.should.equal(true)
        })

        it('should not log an error', function () {
          this.logger.err.called.should.equal(false)
        })
      })

      describe('both passwords empty in array', function () {
        beforeEach(function () {
          this.userDetailsMap.set('testuser', ['', ''])
          this.result = this.AuthenticationController.checkCredentials(
            this.userDetailsMap,
            'testuser',
            'anypassword'
          )
        })

        it('should return false', function () {
          this.result.should.equal(false)
        })

        it('should log an error', function () {
          this.logger.err.should.have.been.calledWith(
            { user: 'testuser' },
            'invalid login details'
          )
        })
      })

      describe('empty single password', function () {
        beforeEach(function () {
          this.userDetailsMap.set('testuser', '')
          this.result = this.AuthenticationController.checkCredentials(
            this.userDetailsMap,
            'testuser',
            'anypassword'
          )
        })

        it('should return false', function () {
          this.result.should.equal(false)
        })

        it('should log an error', function () {
          this.logger.err.should.have.been.calledWith(
            { user: 'testuser' },
            'invalid login details'
          )
        })

        it('should record failure metrics for unknown user', function () {
          this.Metrics.inc.should.have.been.calledWith(
            'security.http-auth.check-credentials',
            1,
            {
              path: 'unknown-user',
              status: 'fail',
            }
          )
        })
      })
    })
  })
})
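A sketch of the behaviour the removed tests cover, with the shape inferred purely from the assertions above (this is not the actual controller source, which may differ, for instance by using a timing-safe comparison):

import logger from '@overleaf/logger'
import Metrics from '@overleaf/metrics'

// Each map entry is a single password or a [primary, fallback] pair;
// empty-string, null and missing entries never match.
function checkCredentials(
  userDetailsMap: Map<string, string | string[] | null>,
  user: string,
  password: string
): boolean {
  const expected = userDetailsMap.get(user)
  const expectedPasswords = Array.isArray(expected) ? expected : [expected]
  const isValid = expectedPasswords.some(p => p && p === password)
  if (!isValid) {
    logger.err({ user }, 'invalid login details')
  }
  Metrics.inc('security.http-auth.check-credentials', 1, {
    // an entry that is missing, null or '' is reported as an unknown user
    path: expected ? 'known-user' : 'unknown-user',
    status: isValid ? 'pass' : 'fail',
  })
  return isValid
}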
@@ -29,7 +29,6 @@ describe('DocumentUpdaterHandler', function () {
        url: 'http://project_history.example.com',
      },
    },
    filestoreMigrationLevel: 0,
    moduleImportSequence: [],
  }
  this.source = 'dropbox'
@@ -1492,7 +1491,7 @@ describe('DocumentUpdaterHandler', function () {

  describe('with filestore disabled', function () {
    beforeEach(function () {
      this.settings.filestoreMigrationLevel = 2
      this.settings.disableFilestore = true
    })
    it('should add files without URL and with createdBlob', async function () {
      this.fileId = new ObjectId()
@@ -1701,7 +1700,7 @@ describe('DocumentUpdaterHandler', function () {
  })
  describe('with filestore disabled', function () {
    beforeEach(function () {
      this.settings.filestoreMigrationLevel = 2
      this.settings.disableFilestore = true
    })
    it('should add files without URL', async function () {
      const fileId1 = new ObjectId()
@@ -87,14 +87,6 @@ describe('DocumentController', function () {
      },
    }

    ctx.Modules = {
      promises: {
        hooks: {
          fire: sinon.stub().resolves(),
        },
      },
    }

    vi.doMock('../../../../app/src/Features/Project/ProjectGetter', () => ({
      default: ctx.ProjectGetter,
    }))
@@ -121,10 +113,6 @@ describe('DocumentController', function () {
      default: ctx.ChatApiHandler,
    }))

    vi.doMock('../../../../app/src/infrastructure/Modules.js', () => ({
      default: ctx.Modules,
    }))

    ctx.DocumentController = (await import(MODULE_PATH)).default
  })

@@ -220,15 +208,6 @@ describe('DocumentController', function () {
    it('should return a successful response', function (ctx) {
      ctx.res.success.should.equal(true)
    })

    it('should call the docModified hook', function (ctx) {
      sinon.assert.calledWith(
        ctx.Modules.promises.hooks.fire,
        'docModified',
        ctx.project._id,
        ctx.doc._id
      )
    })
  })

  describe("when the document doesn't exist", function () {
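For orientation, the removed test asserted that saving a document fans a 'docModified' event out through the module hook system. A sketch of the controller-side call it was exercising (assumed shape, reconstructed from the stub and the calledWith assertion; the function name is hypothetical):

import Modules from '../../../../app/src/infrastructure/Modules.js'

async function afterDocUpdated(projectId: string, docId: string) {
  // notify any registered module hooks that the doc changed
  await Modules.promises.hooks.fire('docModified', projectId, docId)
}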
@@ -50,7 +50,7 @@ describe('ReferencesHandler', function () {
        filestore: { url: 'http://some.url/filestore' },
        project_history: { url: 'http://project-history.local' },
      },
      filestoreMigrationLevel: 2,
      enableProjectHistoryBlobs: true,
    }),
  }))
@@ -39,7 +39,6 @@ describe('SplitTestHandler', function () {
    }
    this.SplitTestCache.get.resolves(this.cachedSplitTests)
    this.Settings = {
      filestoreMigrationLevel: 0,
      moduleImportSequence: [],
      overleaf: {},
      devToolbar: {
@@ -57,7 +57,7 @@ describe('TpdsUpdateSender', function () {
        url: projectHistoryUrl,
      },
    },
    filestoreMigrationLevel: true,
    enableProjectHistoryBlobs: true,
  }
  const getUsers = sinon.stub()
  getUsers
@@ -7,7 +7,6 @@ describe('Features', function () {
    this.Features = SandboxedModule.require(modulePath, {
      requires: {
        '@overleaf/settings': (this.settings = {
          filestoreMigrationLevel: 0,
          moduleImportSequence: [],
          enabledLinkedFileTypes: [],
        }),
@@ -1,10 +1,3 @@
export type AdminCapability = 'modify-user-email' | 'view-project'

export type AdminRole =
  | 'engagement'
  | 'engineering'
  | 'finance'
  | 'product'
  | 'sales'
  | 'support'
  | 'support_tier_1'
export type AdminRole = 'engineering'
@@ -53,10 +53,7 @@ export type WebModule = {
    apply: (webRouter: any, privateApiRouter: any, publicApiRouter: any) => void
  }
  hooks?: {
    promises?: {
      [name: string]: (...args: any[]) => Promise<any>
    }
    [name: string]: ((...args: any[]) => void) | any
    [name: string]: (args: any[]) => void
  }
  middleware?: {
    [name: string]: RequestHandler
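An illustrative module shape for the two hook typings in this hunk (hypothetical hook name; the removed typing additionally allowed promise-returning hooks under a `promises` key, the restored one only declares plain callbacks):

const exampleModule = {
  hooks: {
    // matches the restored `[name: string]: (args: any[]) => void` signature
    docModified: (args: any[]) => {
      console.log('docModified hook fired with', args)
    },
  },
}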
@@ -27,6 +27,5 @@ declare global {
    gtag?: (...args: any) => void

    propensity?: (propensityId?: string) => void
    olLoadGA?: () => void
  }
}