mirror of
https://github.com/yu-i-i/overleaf-cep.git
synced 2025-07-23 05:00:07 +02:00
Compare commits
28 commits
d32eb2b449
...
51f2f5b8db
Author | SHA1 | Date | |
---|---|---|---|
![]() |
51f2f5b8db | ||
![]() |
c0d3cb3622 | ||
![]() |
9ece5d0dad | ||
![]() |
097aeaaf3a | ||
![]() |
3b3c9e7305 | ||
![]() |
3736a0c27d | ||
![]() |
2e35bfe14f | ||
![]() |
c59d1e1780 | ||
![]() |
492917502c | ||
![]() |
e24b15ef2f | ||
![]() |
360918956f | ||
![]() |
177b57590e | ||
![]() |
42f446be67 | ||
![]() |
fcd181e12c | ||
![]() |
a0a4a9d518 | ||
![]() |
0546fb7233 | ||
![]() |
b1880ba64d | ||
![]() |
082121d3da | ||
![]() |
81f0807fc6 | ||
![]() |
bf43d4f709 | ||
![]() |
ae3f63d37f | ||
![]() |
30b0cabbbc | ||
![]() |
2f427ef0e0 | ||
![]() |
0778bab910 | ||
![]() |
d5b5710d01 | ||
![]() |
868d562d96 | ||
![]() |
5d79cf18c0 | ||
![]() |
7ecee2e0aa |
114 changed files with 2687 additions and 566 deletions
31
package-lock.json
generated
31
package-lock.json
generated
|
@ -35581,6 +35581,7 @@
|
|||
"resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
|
||||
"integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==",
|
||||
"deprecated": "request has been deprecated, see https://github.com/request/request/issues/3142",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"aws-sign2": "~0.7.0",
|
||||
"aws4": "^1.8.0",
|
||||
|
@ -35638,15 +35639,15 @@
|
|||
}
|
||||
},
|
||||
"node_modules/request/node_modules/tough-cookie": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
|
||||
"integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
|
||||
"version": "5.1.2",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-5.1.2.tgz",
|
||||
"integrity": "sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A==",
|
||||
"license": "BSD-3-Clause",
|
||||
"dependencies": {
|
||||
"psl": "^1.1.28",
|
||||
"punycode": "^2.1.1"
|
||||
"tldts": "^6.1.32"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.8"
|
||||
"node": ">=16"
|
||||
}
|
||||
},
|
||||
"node_modules/requestretry": {
|
||||
|
@ -39612,6 +39613,24 @@
|
|||
"tlds": "bin.js"
|
||||
}
|
||||
},
|
||||
"node_modules/tldts": {
|
||||
"version": "6.1.86",
|
||||
"resolved": "https://registry.npmjs.org/tldts/-/tldts-6.1.86.tgz",
|
||||
"integrity": "sha512-WMi/OQ2axVTf/ykqCQgXiIct+mSQDFdH2fkwhPwgEwvJ1kSzZRiinb0zF2Xb8u4+OqPChmyI6MEu4EezNJz+FQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"tldts-core": "^6.1.86"
|
||||
},
|
||||
"bin": {
|
||||
"tldts": "bin/cli.js"
|
||||
}
|
||||
},
|
||||
"node_modules/tldts-core": {
|
||||
"version": "6.1.86",
|
||||
"resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-6.1.86.tgz",
|
||||
"integrity": "sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/tmp": {
|
||||
"version": "0.2.3",
|
||||
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz",
|
||||
|
|
|
@ -33,6 +33,9 @@
|
|||
"path-to-regexp": "3.3.0",
|
||||
"body-parser": "1.20.3",
|
||||
"multer": "2.0.1"
|
||||
},
|
||||
"request@2.88.2": {
|
||||
"tough-cookie": "5.1.2"
|
||||
}
|
||||
},
|
||||
"scripts": {
|
||||
|
|
23
patches/@node-saml+node-saml+4.0.5.patch
Normal file
23
patches/@node-saml+node-saml+4.0.5.patch
Normal file
|
@ -0,0 +1,23 @@
|
|||
diff --git a/node_modules/@node-saml/node-saml/lib/saml.js b/node_modules/@node-saml/node-saml/lib/saml.js
|
||||
index fba15b9..a5778cb 100644
|
||||
--- a/node_modules/@node-saml/node-saml/lib/saml.js
|
||||
+++ b/node_modules/@node-saml/node-saml/lib/saml.js
|
||||
@@ -336,7 +336,8 @@ class SAML {
|
||||
const requestOrResponse = request || response;
|
||||
(0, utility_1.assertRequired)(requestOrResponse, "either request or response is required");
|
||||
let buffer;
|
||||
- if (this.options.skipRequestCompression) {
|
||||
+ // logout requestOrResponse must be compressed anyway
|
||||
+ if (this.options.skipRequestCompression && operation !== "logout") {
|
||||
buffer = Buffer.from(requestOrResponse, "utf8");
|
||||
}
|
||||
else {
|
||||
@@ -495,7 +496,7 @@ class SAML {
|
||||
try {
|
||||
xml = Buffer.from(container.SAMLResponse, "base64").toString("utf8");
|
||||
doc = await (0, xml_1.parseDomFromString)(xml);
|
||||
- const inResponseToNodes = xml_1.xpath.selectAttributes(doc, "/*[local-name()='Response']/@InResponseTo");
|
||||
+ const inResponseToNodes = xml_1.xpath.selectAttributes(doc, "/*[local-name()='Response' or local-name()='LogoutResponse']/@InResponseTo");
|
||||
if (inResponseToNodes) {
|
||||
inResponseTo = inResponseToNodes.length ? inResponseToNodes[0].nodeValue : null;
|
||||
await this.validateInResponseTo(inResponseTo);
|
64
patches/ldapauth-fork+4.3.3.patch
Normal file
64
patches/ldapauth-fork+4.3.3.patch
Normal file
|
@ -0,0 +1,64 @@
|
|||
diff --git a/node_modules/ldapauth-fork/lib/ldapauth.js b/node_modules/ldapauth-fork/lib/ldapauth.js
|
||||
index 85ecf36a8b..a7d07e0f78 100644
|
||||
--- a/node_modules/ldapauth-fork/lib/ldapauth.js
|
||||
+++ b/node_modules/ldapauth-fork/lib/ldapauth.js
|
||||
@@ -69,6 +69,7 @@ function LdapAuth(opts) {
|
||||
this.opts.bindProperty || (this.opts.bindProperty = 'dn');
|
||||
this.opts.groupSearchScope || (this.opts.groupSearchScope = 'sub');
|
||||
this.opts.groupDnProperty || (this.opts.groupDnProperty = 'dn');
|
||||
+ this.opts.tlsStarted = false;
|
||||
|
||||
EventEmitter.call(this);
|
||||
|
||||
@@ -108,21 +109,7 @@ function LdapAuth(opts) {
|
||||
this._userClient.on('error', this._handleError.bind(this));
|
||||
|
||||
var self = this;
|
||||
- if (this.opts.starttls) {
|
||||
- // When starttls is enabled, this callback supplants the 'connect' callback
|
||||
- this._adminClient.starttls(this.opts.tlsOptions, this._adminClient.controls, function(err) {
|
||||
- if (err) {
|
||||
- self._handleError(err);
|
||||
- } else {
|
||||
- self._onConnectAdmin();
|
||||
- }
|
||||
- });
|
||||
- this._userClient.starttls(this.opts.tlsOptions, this._userClient.controls, function(err) {
|
||||
- if (err) {
|
||||
- self._handleError(err);
|
||||
- }
|
||||
- });
|
||||
- } else if (opts.reconnect) {
|
||||
+ if (opts.reconnect && !this.opts.starttls) {
|
||||
this.once('_installReconnectListener', function() {
|
||||
self.log && self.log.trace('install reconnect listener');
|
||||
self._adminClient.on('connect', function() {
|
||||
@@ -384,6 +371,28 @@ LdapAuth.prototype._findGroups = function(user, callback) {
|
||||
*/
|
||||
LdapAuth.prototype.authenticate = function(username, password, callback) {
|
||||
var self = this;
|
||||
+ if (this.opts.starttls && !this.opts.tlsStarted) {
|
||||
+ // When starttls is enabled, this callback supplants the 'connect' callback
|
||||
+ this._adminClient.starttls(this.opts.tlsOptions, this._adminClient.controls, function (err) {
|
||||
+ if (err) {
|
||||
+ self._handleError(err);
|
||||
+ } else {
|
||||
+ self._onConnectAdmin(function(){self._handleAuthenticate(username, password, callback);});
|
||||
+ }
|
||||
+ });
|
||||
+ this._userClient.starttls(this.opts.tlsOptions, this._userClient.controls, function (err) {
|
||||
+ if (err) {
|
||||
+ self._handleError(err);
|
||||
+ }
|
||||
+ });
|
||||
+ } else {
|
||||
+ self._handleAuthenticate(username, password, callback);
|
||||
+ }
|
||||
+};
|
||||
+
|
||||
+LdapAuth.prototype._handleAuthenticate = function (username, password, callback) {
|
||||
+ this.opts.tlsStarted = true;
|
||||
+ var self = this;
|
||||
|
||||
if (typeof password === 'undefined' || password === null || password === '') {
|
||||
return callback(new Error('no password given'));
|
|
@ -21,9 +21,11 @@ test-e2e-native:
|
|||
|
||||
test-e2e:
|
||||
docker compose build host-admin
|
||||
docker compose up -d host-admin
|
||||
docker compose up --no-log-prefix --exit-code-from=e2e e2e
|
||||
|
||||
test-e2e-open:
|
||||
docker compose up -d host-admin
|
||||
docker compose up --no-log-prefix --exit-code-from=e2e-open e2e-open
|
||||
|
||||
clean:
|
||||
|
|
|
@ -35,7 +35,7 @@ services:
|
|||
MAILTRAP_PASSWORD: 'password-for-mailtrap'
|
||||
|
||||
mongo:
|
||||
image: mongo:6.0
|
||||
image: mongo:8.0.11
|
||||
command: '--replSet overleaf'
|
||||
volumes:
|
||||
- ../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@ -2,6 +2,7 @@ import {
|
|||
createNewFile,
|
||||
createProject,
|
||||
openProjectById,
|
||||
testNewFileUpload,
|
||||
} from './helpers/project'
|
||||
import { isExcludedBySharding, startWith } from './helpers/config'
|
||||
import { ensureUserExists, login } from './helpers/login'
|
||||
|
@ -119,24 +120,7 @@ describe('editor', () => {
|
|||
cy.get('button').contains('New file').click({ force: true })
|
||||
})
|
||||
|
||||
it('can upload file', () => {
|
||||
const name = `${uuid()}.txt`
|
||||
const content = `Test File Content ${name}`
|
||||
cy.get('button').contains('Upload').click({ force: true })
|
||||
cy.get('input[type=file]')
|
||||
.first()
|
||||
.selectFile(
|
||||
{
|
||||
contents: Cypress.Buffer.from(content),
|
||||
fileName: name,
|
||||
lastModified: Date.now(),
|
||||
},
|
||||
{ force: true }
|
||||
)
|
||||
// force: The file-tree pane is too narrow to display the full name.
|
||||
cy.findByTestId('file-tree').findByText(name).click({ force: true })
|
||||
cy.findByText(content)
|
||||
})
|
||||
testNewFileUpload()
|
||||
|
||||
it('should not display import from URL', () => {
|
||||
cy.findByText('From external URL').should('not.exist')
|
||||
|
|
104
server-ce/test/filestore-migration.spec.ts
Normal file
104
server-ce/test/filestore-migration.spec.ts
Normal file
|
@ -0,0 +1,104 @@
|
|||
import { ensureUserExists, login } from './helpers/login'
|
||||
import {
|
||||
createProject,
|
||||
openProjectById,
|
||||
prepareFileUploadTest,
|
||||
} from './helpers/project'
|
||||
import { isExcludedBySharding, startWith } from './helpers/config'
|
||||
import { prepareWaitForNextCompileSlot } from './helpers/compile'
|
||||
import { beforeWithReRunOnTestRetry } from './helpers/beforeWithReRunOnTestRetry'
|
||||
import { v4 as uuid } from 'uuid'
|
||||
import { purgeFilestoreData, runScript } from './helpers/hostAdminClient'
|
||||
|
||||
describe('filestore migration', function () {
|
||||
if (isExcludedBySharding('CE_CUSTOM_3')) return
|
||||
startWith({ withDataDir: true, resetData: true, vars: {} })
|
||||
ensureUserExists({ email: 'user@example.com' })
|
||||
|
||||
let projectName: string
|
||||
let projectId: string
|
||||
let waitForCompileRateLimitCoolOff: (fn: () => void) => void
|
||||
const previousBinaryFiles: (() => void)[] = []
|
||||
beforeWithReRunOnTestRetry(function () {
|
||||
projectName = `project-${uuid()}`
|
||||
login('user@example.com')
|
||||
createProject(projectName, { type: 'Example project' }).then(
|
||||
id => (projectId = id)
|
||||
)
|
||||
let queueReset
|
||||
;({ waitForCompileRateLimitCoolOff, queueReset } =
|
||||
prepareWaitForNextCompileSlot())
|
||||
queueReset()
|
||||
previousBinaryFiles.push(prepareFileUploadTest(true))
|
||||
})
|
||||
|
||||
beforeEach(() => {
|
||||
login('user@example.com')
|
||||
waitForCompileRateLimitCoolOff(() => {
|
||||
openProjectById(projectId)
|
||||
})
|
||||
})
|
||||
|
||||
function checkFilesAreAccessible() {
|
||||
it('can upload new binary file and read previous uploads', function () {
|
||||
previousBinaryFiles.push(prepareFileUploadTest(true))
|
||||
for (const check of previousBinaryFiles) {
|
||||
check()
|
||||
}
|
||||
})
|
||||
|
||||
it('renders frog jpg', () => {
|
||||
cy.findByTestId('file-tree').findByText('frog.jpg').click()
|
||||
cy.get('[alt="frog.jpg"]')
|
||||
.should('be.visible')
|
||||
.and('have.prop', 'naturalWidth')
|
||||
.should('be.greaterThan', 0)
|
||||
})
|
||||
}
|
||||
|
||||
describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL not set', function () {
|
||||
startWith({ withDataDir: true, vars: {} })
|
||||
checkFilesAreAccessible()
|
||||
})
|
||||
|
||||
describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL=0', function () {
|
||||
startWith({
|
||||
withDataDir: true,
|
||||
vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '0' },
|
||||
})
|
||||
checkFilesAreAccessible()
|
||||
|
||||
describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL=1', function () {
|
||||
startWith({
|
||||
withDataDir: true,
|
||||
vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '1' },
|
||||
})
|
||||
checkFilesAreAccessible()
|
||||
|
||||
describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL=2', function () {
|
||||
startWith({
|
||||
withDataDir: true,
|
||||
vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '1' },
|
||||
})
|
||||
before(async function () {
|
||||
await runScript({
|
||||
cwd: 'services/history-v1',
|
||||
script: 'storage/scripts/back_fill_file_hash.mjs',
|
||||
})
|
||||
})
|
||||
startWith({
|
||||
withDataDir: true,
|
||||
vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '2' },
|
||||
})
|
||||
checkFilesAreAccessible()
|
||||
|
||||
describe('purge filestore data', function () {
|
||||
before(async function () {
|
||||
await purgeFilestoreData()
|
||||
})
|
||||
checkFilesAreAccessible()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
|
@ -9,6 +9,7 @@ export function isExcludedBySharding(
|
|||
| 'CE_DEFAULT'
|
||||
| 'CE_CUSTOM_1'
|
||||
| 'CE_CUSTOM_2'
|
||||
| 'CE_CUSTOM_3'
|
||||
| 'PRO_DEFAULT_1'
|
||||
| 'PRO_DEFAULT_2'
|
||||
| 'PRO_CUSTOM_1'
|
||||
|
|
|
@ -85,6 +85,12 @@ export async function getRedisKeys() {
|
|||
return stdout.split('\n')
|
||||
}
|
||||
|
||||
export async function purgeFilestoreData() {
|
||||
await fetchJSON(`${hostAdminURL}/data/user_files`, {
|
||||
method: 'DELETE',
|
||||
})
|
||||
}
|
||||
|
||||
async function sleep(ms: number) {
|
||||
return new Promise(resolve => {
|
||||
setTimeout(resolve, ms)
|
||||
|
|
|
@ -216,3 +216,43 @@ export function createNewFile() {
|
|||
|
||||
return fileName
|
||||
}
|
||||
|
||||
export function prepareFileUploadTest(binary = false) {
|
||||
const name = `${uuid()}.txt`
|
||||
const content = `Test File Content ${name}${binary ? ' \x00' : ''}`
|
||||
cy.get('button').contains('Upload').click({ force: true })
|
||||
cy.get('input[type=file]')
|
||||
.first()
|
||||
.selectFile(
|
||||
{
|
||||
contents: Cypress.Buffer.from(content),
|
||||
fileName: name,
|
||||
lastModified: Date.now(),
|
||||
},
|
||||
{ force: true }
|
||||
)
|
||||
|
||||
// wait for the upload to finish
|
||||
cy.findByRole('treeitem', { name })
|
||||
|
||||
return function check() {
|
||||
cy.findByRole('treeitem', { name }).click()
|
||||
if (binary) {
|
||||
cy.findByText(content).should('not.have.class', 'cm-line')
|
||||
} else {
|
||||
cy.findByText(content).should('have.class', 'cm-line')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function testNewFileUpload() {
|
||||
it('can upload text file', () => {
|
||||
const check = prepareFileUploadTest(false)
|
||||
check()
|
||||
})
|
||||
|
||||
it('can upload binary file', () => {
|
||||
const check = prepareFileUploadTest(true)
|
||||
check()
|
||||
})
|
||||
}
|
||||
|
|
|
@ -29,6 +29,17 @@ const IMAGES = {
|
|||
PRO: process.env.IMAGE_TAG_PRO.replace(/:.+/, ''),
|
||||
}
|
||||
|
||||
function defaultDockerComposeOverride() {
|
||||
return {
|
||||
services: {
|
||||
sharelatex: {
|
||||
environment: {},
|
||||
},
|
||||
'git-bridge': {},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
let previousConfig = ''
|
||||
|
||||
function readDockerComposeOverride() {
|
||||
|
@ -38,14 +49,7 @@ function readDockerComposeOverride() {
|
|||
if (error.code !== 'ENOENT') {
|
||||
throw error
|
||||
}
|
||||
return {
|
||||
services: {
|
||||
sharelatex: {
|
||||
environment: {},
|
||||
},
|
||||
'git-bridge': {},
|
||||
},
|
||||
}
|
||||
return defaultDockerComposeOverride
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -77,12 +81,21 @@ app.use(bodyParser.json())
|
|||
app.use((req, res, next) => {
|
||||
// Basic access logs
|
||||
console.log(req.method, req.url, req.body)
|
||||
const json = res.json
|
||||
res.json = body => {
|
||||
console.log(req.method, req.url, req.body, '->', body)
|
||||
json.call(res, body)
|
||||
}
|
||||
next()
|
||||
})
|
||||
app.use((req, res, next) => {
|
||||
// Add CORS headers
|
||||
const accessControlAllowOrigin =
|
||||
process.env.ACCESS_CONTROL_ALLOW_ORIGIN || 'http://sharelatex'
|
||||
res.setHeader('Access-Control-Allow-Origin', accessControlAllowOrigin)
|
||||
res.setHeader('Access-Control-Allow-Headers', 'Content-Type')
|
||||
res.setHeader('Access-Control-Max-Age', '3600')
|
||||
res.setHeader('Access-Control-Allow-Methods', 'DELETE, GET, HEAD, POST, PUT')
|
||||
next()
|
||||
})
|
||||
|
||||
|
@ -133,6 +146,7 @@ const allowedVars = Joi.object(
|
|||
'V1_HISTORY_URL',
|
||||
'SANDBOXED_COMPILES',
|
||||
'ALL_TEX_LIVE_DOCKER_IMAGE_NAMES',
|
||||
'OVERLEAF_FILESTORE_MIGRATION_LEVEL',
|
||||
'OVERLEAF_TEMPLATES_USER_ID',
|
||||
'OVERLEAF_NEW_PROJECT_TEMPLATE_LINKS',
|
||||
'OVERLEAF_ALLOW_PUBLIC_ACCESS',
|
||||
|
@ -319,8 +333,19 @@ app.get('/redis/keys', (req, res) => {
|
|||
)
|
||||
})
|
||||
|
||||
app.delete('/data/user_files', (req, res) => {
|
||||
runDockerCompose(
|
||||
'exec',
|
||||
['sharelatex', 'rm', '-rf', '/var/lib/overleaf/data/user_files'],
|
||||
(error, stdout, stderr) => {
|
||||
res.json({ error, stdout, stderr })
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
app.use(handleValidationErrors())
|
||||
|
||||
purgeDataDir()
|
||||
writeDockerComposeOverride(defaultDockerComposeOverride())
|
||||
|
||||
app.listen(80)
|
||||
|
|
|
@ -42,7 +42,7 @@ services:
|
|||
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
|
||||
user: root
|
||||
mongo:
|
||||
image: mongo:7.0.20
|
||||
image: mongo:8.0.11
|
||||
command: --replSet overleaf
|
||||
volumes:
|
||||
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@ -44,7 +44,7 @@ services:
|
|||
command: npm run --silent test:acceptance
|
||||
|
||||
mongo:
|
||||
image: mongo:7.0.20
|
||||
image: mongo:8.0.11
|
||||
command: --replSet overleaf
|
||||
volumes:
|
||||
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@ -42,7 +42,7 @@ services:
|
|||
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
|
||||
user: root
|
||||
mongo:
|
||||
image: mongo:7.0.20
|
||||
image: mongo:8.0.11
|
||||
command: --replSet overleaf
|
||||
volumes:
|
||||
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@ -44,7 +44,7 @@ services:
|
|||
command: npm run --silent test:acceptance
|
||||
|
||||
mongo:
|
||||
image: mongo:7.0.20
|
||||
image: mongo:8.0.11
|
||||
command: --replSet overleaf
|
||||
volumes:
|
||||
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@ -47,7 +47,7 @@ services:
|
|||
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
|
||||
user: root
|
||||
mongo:
|
||||
image: mongo:7.0.20
|
||||
image: mongo:8.0.11
|
||||
command: --replSet overleaf
|
||||
volumes:
|
||||
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@ -49,7 +49,7 @@ services:
|
|||
command: npm run --silent test:acceptance
|
||||
|
||||
mongo:
|
||||
image: mongo:7.0.20
|
||||
image: mongo:8.0.11
|
||||
command: --replSet overleaf
|
||||
volumes:
|
||||
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@ -55,7 +55,7 @@ services:
|
|||
retries: 20
|
||||
|
||||
mongo:
|
||||
image: mongo:7.0.20
|
||||
image: mongo:8.0.11
|
||||
command: --replSet overleaf
|
||||
volumes:
|
||||
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@ -57,7 +57,7 @@ services:
|
|||
retries: 20
|
||||
|
||||
mongo:
|
||||
image: mongo:7.0.20
|
||||
image: mongo:8.0.11
|
||||
command: --replSet overleaf
|
||||
volumes:
|
||||
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@ -75,7 +75,7 @@ services:
|
|||
retries: 20
|
||||
|
||||
mongo:
|
||||
image: mongo:7.0.20
|
||||
image: mongo:8.0.11
|
||||
command: --replSet overleaf
|
||||
volumes:
|
||||
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@ -83,7 +83,7 @@ services:
|
|||
retries: 20
|
||||
|
||||
mongo:
|
||||
image: mongo:7.0.20
|
||||
image: mongo:8.0.11
|
||||
command: --replSet overleaf
|
||||
volumes:
|
||||
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@ -150,10 +150,6 @@ const CONCURRENT_BATCHES = parseInt(process.env.CONCURRENT_BATCHES || '2', 10)
|
|||
const RETRIES = parseInt(process.env.RETRIES || '10', 10)
|
||||
const RETRY_DELAY_MS = parseInt(process.env.RETRY_DELAY_MS || '100', 10)
|
||||
|
||||
const USER_FILES_BUCKET_NAME = process.env.USER_FILES_BUCKET_NAME || ''
|
||||
if (!USER_FILES_BUCKET_NAME) {
|
||||
throw new Error('env var USER_FILES_BUCKET_NAME is missing')
|
||||
}
|
||||
const RETRY_FILESTORE_404 = process.env.RETRY_FILESTORE_404 === 'true'
|
||||
const BUFFER_DIR = fs.mkdtempSync(
|
||||
process.env.BUFFER_DIR_PREFIX || '/tmp/back_fill_file_hash-'
|
||||
|
|
|
@ -9,15 +9,12 @@ import { Blob } from 'overleaf-editor-core'
|
|||
import {
|
||||
BlobStore,
|
||||
getStringLengthOfFile,
|
||||
GLOBAL_BLOBS,
|
||||
makeBlobForFile,
|
||||
} from '../lib/blob_store/index.js'
|
||||
import { db } from '../lib/mongodb.js'
|
||||
import commandLineArgs from 'command-line-args'
|
||||
import readline from 'node:readline'
|
||||
import { _blobIsBackedUp, backupBlob } from '../lib/backupBlob.mjs'
|
||||
import { NotFoundError } from '@overleaf/object-persistor/src/Errors.js'
|
||||
import filestorePersistor from '../lib/persistor.js'
|
||||
import { setTimeout } from 'node:timers/promises'
|
||||
|
||||
// Silence warning.
|
||||
|
@ -52,12 +49,11 @@ ObjectId.cacheHexString = true
|
|||
*/
|
||||
|
||||
/**
|
||||
* @return {{FIX_NOT_FOUND: boolean, FIX_HASH_MISMATCH: boolean, FIX_DELETE_PERMISSION: boolean, FIX_MISSING_HASH: boolean, LOGS: string}}
|
||||
* @return {{FIX_NOT_FOUND: boolean, FIX_HASH_MISMATCH: boolean, FIX_MISSING_HASH: boolean, LOGS: string}}
|
||||
*/
|
||||
function parseArgs() {
|
||||
const args = commandLineArgs([
|
||||
{ name: 'fixNotFound', type: String, defaultValue: 'true' },
|
||||
{ name: 'fixDeletePermission', type: String, defaultValue: 'true' },
|
||||
{ name: 'fixHashMismatch', type: String, defaultValue: 'true' },
|
||||
{ name: 'fixMissingHash', type: String, defaultValue: 'true' },
|
||||
{ name: 'logs', type: String, defaultValue: '' },
|
||||
|
@ -74,20 +70,13 @@ function parseArgs() {
|
|||
}
|
||||
return {
|
||||
FIX_HASH_MISMATCH: boolVal('fixNotFound'),
|
||||
FIX_DELETE_PERMISSION: boolVal('fixDeletePermission'),
|
||||
FIX_NOT_FOUND: boolVal('fixHashMismatch'),
|
||||
FIX_MISSING_HASH: boolVal('fixMissingHash'),
|
||||
LOGS: args.logs,
|
||||
}
|
||||
}
|
||||
|
||||
const {
|
||||
FIX_HASH_MISMATCH,
|
||||
FIX_DELETE_PERMISSION,
|
||||
FIX_NOT_FOUND,
|
||||
FIX_MISSING_HASH,
|
||||
LOGS,
|
||||
} = parseArgs()
|
||||
const { FIX_HASH_MISMATCH, FIX_NOT_FOUND, FIX_MISSING_HASH, LOGS } = parseArgs()
|
||||
if (!LOGS) {
|
||||
throw new Error('--logs parameter missing')
|
||||
}
|
||||
|
@ -105,6 +94,37 @@ const STREAM_HIGH_WATER_MARK = parseInt(
|
|||
)
|
||||
const SLEEP_BEFORE_EXIT = parseInt(process.env.SLEEP_BEFORE_EXIT || '1000', 10)
|
||||
|
||||
// Filestore endpoint location
|
||||
const FILESTORE_HOST = process.env.FILESTORE_HOST || '127.0.0.1'
|
||||
const FILESTORE_PORT = process.env.FILESTORE_PORT || '3009'
|
||||
|
||||
async function fetchFromFilestore(projectId, fileId) {
|
||||
const url = `http://${FILESTORE_HOST}:${FILESTORE_PORT}/project/${projectId}/file/${fileId}`
|
||||
const response = await fetch(url)
|
||||
if (!response.ok) {
|
||||
if (response.status === 404) {
|
||||
throw new NotFoundError('file not found in filestore', {
|
||||
status: response.status,
|
||||
})
|
||||
}
|
||||
const body = await response.text()
|
||||
throw new OError('fetchFromFilestore failed', {
|
||||
projectId,
|
||||
fileId,
|
||||
status: response.status,
|
||||
body,
|
||||
})
|
||||
}
|
||||
if (!response.body) {
|
||||
throw new OError('fetchFromFilestore response has no body', {
|
||||
projectId,
|
||||
fileId,
|
||||
status: response.status,
|
||||
})
|
||||
}
|
||||
return response.body
|
||||
}
|
||||
|
||||
/** @type {ProjectsCollection} */
|
||||
const projectsCollection = db.collection('projects')
|
||||
/** @type {DeletedProjectsCollection} */
|
||||
|
@ -302,19 +322,16 @@ async function setHashInMongo(projectId, fileId, hash) {
|
|||
* @return {Promise<void>}
|
||||
*/
|
||||
async function importRestoredFilestoreFile(projectId, fileId, historyId) {
|
||||
const filestoreKey = `${projectId}/${fileId}`
|
||||
const path = `${BUFFER_DIR}/${projectId}_${fileId}`
|
||||
try {
|
||||
let s
|
||||
try {
|
||||
s = await filestorePersistor.getObjectStream(
|
||||
USER_FILES_BUCKET_NAME,
|
||||
filestoreKey
|
||||
)
|
||||
s = await fetchFromFilestore(projectId, fileId)
|
||||
} catch (err) {
|
||||
if (err instanceof NotFoundError) {
|
||||
throw new OError('missing blob, need to restore filestore file', {
|
||||
filestoreKey,
|
||||
projectId,
|
||||
fileId,
|
||||
})
|
||||
}
|
||||
throw err
|
||||
|
@ -325,7 +342,6 @@ async function importRestoredFilestoreFile(projectId, fileId, historyId) {
|
|||
)
|
||||
const blobStore = new BlobStore(historyId)
|
||||
const blob = await blobStore.putFile(path)
|
||||
await backupBlob(historyId, blob, path)
|
||||
await setHashInMongo(projectId, fileId, blob.getHash())
|
||||
} finally {
|
||||
await fs.promises.rm(path, { force: true })
|
||||
|
@ -339,13 +355,9 @@ async function importRestoredFilestoreFile(projectId, fileId, historyId) {
|
|||
* @return {Promise<Blob>}
|
||||
*/
|
||||
async function bufferFilestoreFileToDisk(projectId, fileId, path) {
|
||||
const filestoreKey = `${projectId}/${fileId}`
|
||||
try {
|
||||
await Stream.promises.pipeline(
|
||||
await filestorePersistor.getObjectStream(
|
||||
USER_FILES_BUCKET_NAME,
|
||||
filestoreKey
|
||||
),
|
||||
await fetchFromFilestore(projectId, fileId),
|
||||
fs.createWriteStream(path, { highWaterMark: STREAM_HIGH_WATER_MARK })
|
||||
)
|
||||
const blob = await makeBlobForFile(path)
|
||||
|
@ -356,7 +368,8 @@ async function bufferFilestoreFileToDisk(projectId, fileId, path) {
|
|||
} catch (err) {
|
||||
if (err instanceof NotFoundError) {
|
||||
throw new OError('missing blob, need to restore filestore file', {
|
||||
filestoreKey,
|
||||
projectId,
|
||||
fileId,
|
||||
})
|
||||
}
|
||||
throw err
|
||||
|
@ -389,7 +402,7 @@ async function uploadFilestoreFile(projectId, fileId) {
|
|||
const blob = await bufferFilestoreFileToDisk(projectId, fileId, path)
|
||||
const hash = blob.getHash()
|
||||
try {
|
||||
await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash)
|
||||
await ensureBlobExistsForFile(projectId, fileId, hash)
|
||||
} catch (err) {
|
||||
if (!(err instanceof Blob.NotFoundError)) throw err
|
||||
|
||||
|
@ -397,7 +410,7 @@ async function uploadFilestoreFile(projectId, fileId) {
|
|||
const historyId = project.overleaf.history.id.toString()
|
||||
const blobStore = new BlobStore(historyId)
|
||||
await blobStore.putBlob(path, blob)
|
||||
await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash)
|
||||
await ensureBlobExistsForFile(projectId, fileId, hash)
|
||||
}
|
||||
} finally {
|
||||
await fs.promises.rm(path, { force: true })
|
||||
|
@ -426,11 +439,7 @@ async function fixHashMismatch(line) {
|
|||
await importRestoredFilestoreFile(projectId, fileId, historyId)
|
||||
return true
|
||||
}
|
||||
return await ensureBlobExistsForFileAndUploadToAWS(
|
||||
projectId,
|
||||
fileId,
|
||||
computedHash
|
||||
)
|
||||
return await ensureBlobExistsForFile(projectId, fileId, computedHash)
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -444,30 +453,19 @@ async function hashAlreadyUpdatedInFileTree(projectId, fileId, hash) {
|
|||
return fileRef.hash === hash
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} projectId
|
||||
* @param {string} hash
|
||||
* @return {Promise<boolean>}
|
||||
*/
|
||||
async function needsBackingUpToAWS(projectId, hash) {
|
||||
if (GLOBAL_BLOBS.has(hash)) return false
|
||||
return !(await _blobIsBackedUp(projectId, hash))
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} projectId
|
||||
* @param {string} fileId
|
||||
* @param {string} hash
|
||||
* @return {Promise<boolean>}
|
||||
*/
|
||||
async function ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) {
|
||||
async function ensureBlobExistsForFile(projectId, fileId, hash) {
|
||||
const { project } = await getProject(projectId)
|
||||
const historyId = project.overleaf.history.id.toString()
|
||||
const blobStore = new BlobStore(historyId)
|
||||
if (
|
||||
(await hashAlreadyUpdatedInFileTree(projectId, fileId, hash)) &&
|
||||
(await blobStore.getBlob(hash)) &&
|
||||
!(await needsBackingUpToAWS(projectId, hash))
|
||||
(await blobStore.getBlob(hash))
|
||||
) {
|
||||
return false // already processed
|
||||
}
|
||||
|
@ -488,7 +486,7 @@ async function ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) {
|
|||
)
|
||||
if (writtenBlob.getHash() !== hash) {
|
||||
// Double check download, better safe than sorry.
|
||||
throw new OError('blob corrupted', { writtenBlob })
|
||||
throw new OError('blob corrupted', { writtenBlob, hash })
|
||||
}
|
||||
|
||||
let blob = await blobStore.getBlob(hash)
|
||||
|
@ -497,7 +495,6 @@ async function ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) {
|
|||
// HACK: Skip upload to GCS and finalize putBlob operation directly.
|
||||
await blobStore.backend.insertBlob(historyId, writtenBlob)
|
||||
}
|
||||
await backupBlob(historyId, writtenBlob, path)
|
||||
} finally {
|
||||
await fs.promises.rm(path, { force: true })
|
||||
}
|
||||
|
@ -505,16 +502,6 @@ async function ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) {
|
|||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} line
|
||||
* @return {Promise<boolean>}
|
||||
*/
|
||||
async function fixDeletePermission(line) {
|
||||
let { projectId, fileId, hash } = JSON.parse(line)
|
||||
if (!hash) hash = await computeFilestoreFileHash(projectId, fileId)
|
||||
return await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} line
|
||||
* @return {Promise<boolean>}
|
||||
|
@ -526,7 +513,7 @@ async function fixMissingHash(line) {
|
|||
} = await findFile(projectId, fileId)
|
||||
if (hash) {
|
||||
// processed, double check
|
||||
return await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash)
|
||||
return await ensureBlobExistsForFile(projectId, fileId, hash)
|
||||
}
|
||||
await uploadFilestoreFile(projectId, fileId)
|
||||
return true
|
||||
|
@ -543,11 +530,6 @@ const CASES = {
|
|||
flag: FIX_HASH_MISMATCH,
|
||||
action: fixHashMismatch,
|
||||
},
|
||||
'delete permission': {
|
||||
match: 'storage.objects.delete',
|
||||
flag: FIX_DELETE_PERMISSION,
|
||||
action: fixDeletePermission,
|
||||
},
|
||||
'missing file hash': {
|
||||
match: '"bad file hash"',
|
||||
flag: FIX_MISSING_HASH,
|
||||
|
|
|
@ -20,7 +20,7 @@ import {
|
|||
makeProjectKey,
|
||||
} from '../../../../storage/lib/blob_store/index.js'
|
||||
|
||||
import express from 'express'
|
||||
import { mockFilestore } from './support/MockFilestore.mjs'
|
||||
|
||||
chai.use(chaiExclude)
|
||||
const TIMEOUT = 20 * 1_000
|
||||
|
@ -28,59 +28,6 @@ const TIMEOUT = 20 * 1_000
|
|||
const projectsCollection = db.collection('projects')
|
||||
const deletedProjectsCollection = db.collection('deletedProjects')
|
||||
|
||||
class MockFilestore {
|
||||
constructor() {
|
||||
this.host = process.env.FILESTORE_HOST || '127.0.0.1'
|
||||
this.port = process.env.FILESTORE_PORT || 3009
|
||||
// create a server listening on this.host and this.port
|
||||
this.files = {}
|
||||
|
||||
this.app = express()
|
||||
|
||||
this.app.get('/project/:projectId/file/:fileId', (req, res) => {
|
||||
const { projectId, fileId } = req.params
|
||||
const content = this.files[projectId]?.[fileId]
|
||||
if (!content) return res.status(404).end()
|
||||
res.status(200).end(content)
|
||||
})
|
||||
}
|
||||
|
||||
start() {
|
||||
// reset stored files
|
||||
this.files = {}
|
||||
// start the server
|
||||
if (this.serverPromise) {
|
||||
return this.serverPromise
|
||||
} else {
|
||||
this.serverPromise = new Promise((resolve, reject) => {
|
||||
this.server = this.app.listen(this.port, this.host, err => {
|
||||
if (err) return reject(err)
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
return this.serverPromise
|
||||
}
|
||||
}
|
||||
|
||||
addFile(projectId, fileId, fileContent) {
|
||||
if (!this.files[projectId]) {
|
||||
this.files[projectId] = {}
|
||||
}
|
||||
this.files[projectId][fileId] = fileContent
|
||||
}
|
||||
|
||||
deleteObject(projectId, fileId) {
|
||||
if (this.files[projectId]) {
|
||||
delete this.files[projectId][fileId]
|
||||
if (Object.keys(this.files[projectId]).length === 0) {
|
||||
delete this.files[projectId]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const mockFilestore = new MockFilestore()
|
||||
|
||||
/**
|
||||
* @param {ObjectId} objectId
|
||||
* @return {string}
|
||||
|
|
|
@ -1,48 +1,24 @@
|
|||
import fs from 'node:fs'
|
||||
import Crypto from 'node:crypto'
|
||||
import Stream from 'node:stream'
|
||||
import { promisify } from 'node:util'
|
||||
import { Binary, ObjectId } from 'mongodb'
|
||||
import { Blob } from 'overleaf-editor-core'
|
||||
import { backedUpBlobs, blobs, db } from '../../../../storage/lib/mongodb.js'
|
||||
import { db } from '../../../../storage/lib/mongodb.js'
|
||||
import cleanup from './support/cleanup.js'
|
||||
import testProjects from '../api/support/test_projects.js'
|
||||
import { execFile } from 'node:child_process'
|
||||
import chai, { expect } from 'chai'
|
||||
import chaiExclude from 'chai-exclude'
|
||||
import config from 'config'
|
||||
import { WritableBuffer } from '@overleaf/stream-utils'
|
||||
import {
|
||||
backupPersistor,
|
||||
projectBlobsBucket,
|
||||
} from '../../../../storage/lib/backupPersistor.mjs'
|
||||
import projectKey from '../../../../storage/lib/project_key.js'
|
||||
import {
|
||||
BlobStore,
|
||||
makeProjectKey,
|
||||
} from '../../../../storage/lib/blob_store/index.js'
|
||||
import ObjectPersistor from '@overleaf/object-persistor'
|
||||
import { BlobStore } from '../../../../storage/lib/blob_store/index.js'
|
||||
import { mockFilestore } from './support/MockFilestore.mjs'
|
||||
|
||||
chai.use(chaiExclude)
|
||||
|
||||
const TIMEOUT = 20 * 1_000
|
||||
|
||||
const { deksBucket } = config.get('backupStore')
|
||||
const { tieringStorageClass } = config.get('backupPersistor')
|
||||
|
||||
const projectsCollection = db.collection('projects')
|
||||
const deletedProjectsCollection = db.collection('deletedProjects')
|
||||
|
||||
const FILESTORE_PERSISTOR = ObjectPersistor({
|
||||
backend: 'gcs',
|
||||
gcs: {
|
||||
endpoint: {
|
||||
apiEndpoint: process.env.GCS_API_ENDPOINT,
|
||||
projectId: process.env.GCS_PROJECT_ID,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
/**
|
||||
* @param {ObjectId} objectId
|
||||
* @return {string}
|
||||
|
@ -70,17 +46,6 @@ function binaryForGitBlobHash(gitBlobHash) {
|
|||
return new Binary(Buffer.from(gitBlobHash, 'hex'))
|
||||
}
|
||||
|
||||
async function listS3Bucket(bucket, wantStorageClass) {
|
||||
const client = backupPersistor._getClientForBucket(bucket)
|
||||
const response = await client.listObjectsV2({ Bucket: bucket }).promise()
|
||||
|
||||
for (const object of response.Contents || []) {
|
||||
expect(object).to.have.property('StorageClass', wantStorageClass)
|
||||
}
|
||||
|
||||
return (response.Contents || []).map(item => item.Key || '')
|
||||
}
|
||||
|
||||
function objectIdFromTime(timestamp) {
|
||||
return ObjectId.createFromTime(new Date(timestamp).getTime() / 1000)
|
||||
}
|
||||
|
@ -97,7 +62,6 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
const historyIdDeleted0 = projectIdDeleted0.toString()
|
||||
const fileIdWithDifferentHashFound = objectIdFromTime('2017-02-01T00:00:00Z')
|
||||
const fileIdInGoodState = objectIdFromTime('2017-02-01T00:01:00Z')
|
||||
const fileIdBlobExistsInGCS0 = objectIdFromTime('2017-02-01T00:02:00Z')
|
||||
const fileIdWithDifferentHashNotFound0 = objectIdFromTime(
|
||||
'2017-02-01T00:03:00Z'
|
||||
)
|
||||
|
@ -112,9 +76,6 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
const fileIdWithDifferentHashRestore = objectIdFromTime(
|
||||
'2017-02-01T00:08:00Z'
|
||||
)
|
||||
const fileIdBlobExistsInGCS1 = objectIdFromTime('2017-02-01T00:09:00Z')
|
||||
const fileIdRestoreFromFilestore0 = objectIdFromTime('2017-02-01T00:10:00Z')
|
||||
const fileIdRestoreFromFilestore1 = objectIdFromTime('2017-02-01T00:11:00Z')
|
||||
const fileIdMissing2 = objectIdFromTime('2017-02-01T00:12:00Z')
|
||||
const fileIdHashMissing0 = objectIdFromTime('2017-02-01T00:13:00Z')
|
||||
const fileIdHashMissing1 = objectIdFromTime('2017-02-01T00:14:00Z')
|
||||
|
@ -125,31 +86,11 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
)
|
||||
const deleteProjectsRecordId0 = new ObjectId()
|
||||
const writtenBlobs = [
|
||||
{
|
||||
projectId: projectId0,
|
||||
historyId: historyId0,
|
||||
fileId: fileIdBlobExistsInGCS0,
|
||||
},
|
||||
{
|
||||
projectId: projectId0,
|
||||
historyId: historyId0,
|
||||
fileId: fileIdBlobExistsInGCS1,
|
||||
},
|
||||
{
|
||||
projectId: projectId0,
|
||||
historyId: historyId0,
|
||||
fileId: fileIdWithDifferentHashNotFound0,
|
||||
},
|
||||
{
|
||||
projectId: projectId0,
|
||||
historyId: historyId0,
|
||||
fileId: fileIdRestoreFromFilestore0,
|
||||
},
|
||||
{
|
||||
projectId: projectId0,
|
||||
historyId: historyId0,
|
||||
fileId: fileIdRestoreFromFilestore1,
|
||||
},
|
||||
{
|
||||
projectId: projectId0,
|
||||
historyId: historyId0,
|
||||
|
@ -200,17 +141,6 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
},
|
||||
msg: 'failed to process file',
|
||||
},
|
||||
{
|
||||
projectId: projectId0,
|
||||
fileId: fileIdRestoreFromFilestore0,
|
||||
err: { message: 'OError: hash mismatch' },
|
||||
hash: gitBlobHash(fileIdRestoreFromFilestore0),
|
||||
entry: {
|
||||
ctx: { historyId: historyId0.toString() },
|
||||
hash: hashDoesNotExistAsBlob,
|
||||
},
|
||||
msg: 'failed to process file',
|
||||
},
|
||||
{
|
||||
projectId: projectIdDeleted0,
|
||||
fileId: fileIdWithDifferentHashNotFound1,
|
||||
|
@ -236,33 +166,6 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
err: { message: 'NotFoundError' },
|
||||
msg: 'failed to process file',
|
||||
},
|
||||
{
|
||||
projectId: projectId0,
|
||||
fileId: fileIdBlobExistsInGCS0,
|
||||
hash: gitBlobHash(fileIdBlobExistsInGCS0),
|
||||
err: { message: 'storage.objects.delete' },
|
||||
msg: 'failed to process file',
|
||||
},
|
||||
{
|
||||
projectId: projectId0,
|
||||
fileId: fileIdBlobExistsInGCSCorrupted,
|
||||
hash: gitBlobHash(fileIdBlobExistsInGCSCorrupted),
|
||||
err: { message: 'storage.objects.delete' },
|
||||
msg: 'failed to process file',
|
||||
},
|
||||
{
|
||||
projectId: projectId0,
|
||||
fileId: fileIdBlobExistsInGCS1,
|
||||
hash: gitBlobHash(fileIdBlobExistsInGCS1),
|
||||
err: { message: 'storage.objects.delete' },
|
||||
msg: 'failed to process file',
|
||||
},
|
||||
{
|
||||
projectId: projectId0,
|
||||
fileId: fileIdRestoreFromFilestore1,
|
||||
err: { message: 'storage.objects.delete' },
|
||||
msg: 'failed to process file',
|
||||
},
|
||||
{
|
||||
projectId: projectIdDeleted0,
|
||||
fileId: fileIdMissing1,
|
||||
|
@ -291,22 +194,23 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
reason: 'bad file hash',
|
||||
msg: 'bad file-tree path',
|
||||
},
|
||||
{
|
||||
projectId: projectId0,
|
||||
_id: fileIdBlobExistsInGCSCorrupted,
|
||||
reason: 'bad file hash',
|
||||
msg: 'bad file-tree path',
|
||||
},
|
||||
]
|
||||
if (PRINT_IDS_AND_HASHES_FOR_DEBUGGING) {
|
||||
const fileIds = {
|
||||
fileIdWithDifferentHashFound,
|
||||
fileIdInGoodState,
|
||||
fileIdBlobExistsInGCS0,
|
||||
fileIdBlobExistsInGCS1,
|
||||
fileIdWithDifferentHashNotFound0,
|
||||
fileIdWithDifferentHashNotFound1,
|
||||
fileIdBlobExistsInGCSCorrupted,
|
||||
fileIdMissing0,
|
||||
fileIdMissing1,
|
||||
fileIdMissing2,
|
||||
fileIdWithDifferentHashRestore,
|
||||
fileIdRestoreFromFilestore0,
|
||||
fileIdRestoreFromFilestore1,
|
||||
fileIdHashMissing0,
|
||||
fileIdHashMissing1,
|
||||
}
|
||||
|
@ -330,38 +234,25 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
before(cleanup.everything)
|
||||
|
||||
before('populate blobs/GCS', async function () {
|
||||
await FILESTORE_PERSISTOR.sendStream(
|
||||
USER_FILES_BUCKET_NAME,
|
||||
`${projectId0}/${fileIdRestoreFromFilestore0}`,
|
||||
Stream.Readable.from([fileIdRestoreFromFilestore0.toString()])
|
||||
await mockFilestore.start()
|
||||
mockFilestore.addFile(
|
||||
projectId0,
|
||||
fileIdHashMissing0,
|
||||
fileIdHashMissing0.toString()
|
||||
)
|
||||
await FILESTORE_PERSISTOR.sendStream(
|
||||
USER_FILES_BUCKET_NAME,
|
||||
`${projectId0}/${fileIdRestoreFromFilestore1}`,
|
||||
Stream.Readable.from([fileIdRestoreFromFilestore1.toString()])
|
||||
mockFilestore.addFile(
|
||||
projectId0,
|
||||
fileIdHashMissing1,
|
||||
fileIdHashMissing1.toString()
|
||||
)
|
||||
await FILESTORE_PERSISTOR.sendStream(
|
||||
USER_FILES_BUCKET_NAME,
|
||||
`${projectId0}/${fileIdHashMissing0}`,
|
||||
Stream.Readable.from([fileIdHashMissing0.toString()])
|
||||
)
|
||||
await FILESTORE_PERSISTOR.sendStream(
|
||||
USER_FILES_BUCKET_NAME,
|
||||
`${projectId0}/${fileIdHashMissing1}`,
|
||||
Stream.Readable.from([fileIdHashMissing1.toString()])
|
||||
mockFilestore.addFile(
|
||||
projectId0,
|
||||
fileIdBlobExistsInGCSCorrupted,
|
||||
fileIdBlobExistsInGCSCorrupted.toString()
|
||||
)
|
||||
await new BlobStore(historyId0.toString()).putString(
|
||||
fileIdHashMissing1.toString() // partially processed
|
||||
)
|
||||
await new BlobStore(historyId0.toString()).putString(
|
||||
fileIdBlobExistsInGCS0.toString()
|
||||
)
|
||||
await new BlobStore(historyId0.toString()).putString(
|
||||
fileIdBlobExistsInGCS1.toString()
|
||||
)
|
||||
await new BlobStore(historyId0.toString()).putString(
|
||||
fileIdRestoreFromFilestore1.toString()
|
||||
)
|
||||
const path = '/tmp/test-blob-corrupted'
|
||||
try {
|
||||
await fs.promises.writeFile(path, contentCorruptedBlob)
|
||||
|
@ -426,22 +317,10 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
_id: fileIdWithDifferentHashNotFound0,
|
||||
hash: hashDoesNotExistAsBlob,
|
||||
},
|
||||
{
|
||||
_id: fileIdRestoreFromFilestore0,
|
||||
hash: hashDoesNotExistAsBlob,
|
||||
},
|
||||
{
|
||||
_id: fileIdRestoreFromFilestore1,
|
||||
},
|
||||
{
|
||||
_id: fileIdBlobExistsInGCS0,
|
||||
hash: gitBlobHash(fileIdBlobExistsInGCS0),
|
||||
},
|
||||
{
|
||||
_id: fileIdBlobExistsInGCSCorrupted,
|
||||
hash: gitBlobHash(fileIdBlobExistsInGCSCorrupted),
|
||||
},
|
||||
{ _id: fileIdBlobExistsInGCS1 },
|
||||
],
|
||||
folders: [],
|
||||
},
|
||||
|
@ -546,8 +425,8 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
})
|
||||
it('should print stats', function () {
|
||||
expect(stats).to.contain({
|
||||
processedLines: 16,
|
||||
success: 11,
|
||||
processedLines: 12,
|
||||
success: 7,
|
||||
alreadyProcessed: 0,
|
||||
fileDeleted: 0,
|
||||
skipped: 0,
|
||||
|
@ -558,9 +437,9 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
it('should handle re-run on same logs', async function () {
|
||||
;({ stats } = await runScriptWithLogs())
|
||||
expect(stats).to.contain({
|
||||
processedLines: 16,
|
||||
processedLines: 12,
|
||||
success: 0,
|
||||
alreadyProcessed: 8,
|
||||
alreadyProcessed: 4,
|
||||
fileDeleted: 3,
|
||||
skipped: 0,
|
||||
failed: 3,
|
||||
|
@ -663,31 +542,11 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
_id: fileIdWithDifferentHashNotFound0,
|
||||
hash: gitBlobHash(fileIdWithDifferentHashNotFound0),
|
||||
},
|
||||
// Updated hash
|
||||
{
|
||||
_id: fileIdRestoreFromFilestore0,
|
||||
hash: gitBlobHash(fileIdRestoreFromFilestore0),
|
||||
},
|
||||
// Added hash
|
||||
{
|
||||
_id: fileIdRestoreFromFilestore1,
|
||||
hash: gitBlobHash(fileIdRestoreFromFilestore1),
|
||||
},
|
||||
// No change, blob created
|
||||
{
|
||||
_id: fileIdBlobExistsInGCS0,
|
||||
hash: gitBlobHash(fileIdBlobExistsInGCS0),
|
||||
},
|
||||
// No change, flagged
|
||||
{
|
||||
_id: fileIdBlobExistsInGCSCorrupted,
|
||||
hash: gitBlobHash(fileIdBlobExistsInGCSCorrupted),
|
||||
},
|
||||
// Added hash
|
||||
{
|
||||
_id: fileIdBlobExistsInGCS1,
|
||||
hash: gitBlobHash(fileIdBlobExistsInGCS1),
|
||||
},
|
||||
],
|
||||
folders: [],
|
||||
},
|
||||
|
@ -696,7 +555,7 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
],
|
||||
overleaf: { history: { id: historyId0 } },
|
||||
// Incremented when removing file/updating hash
|
||||
version: 8,
|
||||
version: 5,
|
||||
},
|
||||
])
|
||||
expect(await deletedProjectsCollection.find({}).toArray()).to.deep.equal([
|
||||
|
@ -745,62 +604,6 @@ describe('back_fill_file_hash_fix_up script', function () {
|
|||
(writtenBlobsByProject.get(projectId) || []).concat([fileId])
|
||||
)
|
||||
}
|
||||
expect(
|
||||
(await backedUpBlobs.find({}, { sort: { _id: 1 } }).toArray()).map(
|
||||
entry => {
|
||||
// blobs are pushed unordered into mongo. Sort the list for consistency.
|
||||
entry.blobs.sort()
|
||||
return entry
|
||||
}
|
||||
)
|
||||
).to.deep.equal(
|
||||
Array.from(writtenBlobsByProject.entries()).map(
|
||||
([projectId, fileIds]) => {
|
||||
return {
|
||||
_id: projectId,
|
||||
blobs: fileIds
|
||||
.map(fileId => binaryForGitBlobHash(gitBlobHash(fileId)))
|
||||
.sort(),
|
||||
}
|
||||
}
|
||||
)
|
||||
)
|
||||
})
|
||||
it('should have backed up all the files', async function () {
|
||||
expect(tieringStorageClass).to.exist
|
||||
const objects = await listS3Bucket(projectBlobsBucket, tieringStorageClass)
|
||||
expect(objects.sort()).to.deep.equal(
|
||||
writtenBlobs
|
||||
.map(({ historyId, fileId, hash }) =>
|
||||
makeProjectKey(historyId, hash || gitBlobHash(fileId))
|
||||
)
|
||||
.sort()
|
||||
)
|
||||
for (let { historyId, fileId } of writtenBlobs) {
|
||||
const hash = gitBlobHash(fileId.toString())
|
||||
const s = await backupPersistor.getObjectStream(
|
||||
projectBlobsBucket,
|
||||
makeProjectKey(historyId, hash),
|
||||
{ autoGunzip: true }
|
||||
)
|
||||
const buf = new WritableBuffer()
|
||||
await Stream.promises.pipeline(s, buf)
|
||||
expect(gitBlobHashBuffer(buf.getContents())).to.equal(hash)
|
||||
const id = buf.getContents().toString('utf-8')
|
||||
expect(id).to.equal(fileId.toString())
|
||||
// double check we are not comparing 'undefined' or '[object Object]' above
|
||||
expect(id).to.match(/^[a-f0-9]{24}$/)
|
||||
}
|
||||
const deks = await listS3Bucket(deksBucket, 'STANDARD')
|
||||
expect(deks.sort()).to.deep.equal(
|
||||
Array.from(
|
||||
new Set(
|
||||
writtenBlobs.map(
|
||||
({ historyId }) => projectKey.format(historyId) + '/dek'
|
||||
)
|
||||
)
|
||||
).sort()
|
||||
)
|
||||
})
|
||||
it('should have written the back filled files to history v1', async function () {
|
||||
for (const { historyId, fileId } of writtenBlobs) {
|
||||
|
|
|
@ -0,0 +1,54 @@
|
|||
import express from 'express'
|
||||
|
||||
class MockFilestore {
|
||||
constructor() {
|
||||
this.host = process.env.FILESTORE_HOST || '127.0.0.1'
|
||||
this.port = process.env.FILESTORE_PORT || 3009
|
||||
// create a server listening on this.host and this.port
|
||||
this.files = {}
|
||||
|
||||
this.app = express()
|
||||
|
||||
this.app.get('/project/:projectId/file/:fileId', (req, res) => {
|
||||
const { projectId, fileId } = req.params
|
||||
const content = this.files[projectId]?.[fileId]
|
||||
if (!content) return res.status(404).end()
|
||||
res.status(200).end(content)
|
||||
})
|
||||
}
|
||||
|
||||
start() {
|
||||
// reset stored files
|
||||
this.files = {}
|
||||
// start the server
|
||||
if (this.serverPromise) {
|
||||
return this.serverPromise
|
||||
} else {
|
||||
this.serverPromise = new Promise((resolve, reject) => {
|
||||
this.server = this.app.listen(this.port, this.host, err => {
|
||||
if (err) return reject(err)
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
return this.serverPromise
|
||||
}
|
||||
}
|
||||
|
||||
addFile(projectId, fileId, fileContent) {
|
||||
if (!this.files[projectId]) {
|
||||
this.files[projectId] = {}
|
||||
}
|
||||
this.files[projectId][fileId] = fileContent
|
||||
}
|
||||
|
||||
deleteObject(projectId, fileId) {
|
||||
if (this.files[projectId]) {
|
||||
delete this.files[projectId][fileId]
|
||||
if (Object.keys(this.files[projectId]).length === 0) {
|
||||
delete this.files[projectId]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const mockFilestore = new MockFilestore()
|
|
@ -42,7 +42,7 @@ services:
|
|||
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
|
||||
user: root
|
||||
mongo:
|
||||
image: mongo:7.0.20
|
||||
image: mongo:8.0.11
|
||||
command: --replSet overleaf
|
||||
volumes:
|
||||
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@ -44,7 +44,7 @@ services:
|
|||
command: npm run --silent test:acceptance
|
||||
|
||||
mongo:
|
||||
image: mongo:7.0.20
|
||||
image: mongo:8.0.11
|
||||
command: --replSet overleaf
|
||||
volumes:
|
||||
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@ -55,7 +55,7 @@ services:
|
|||
retries: 20
|
||||
|
||||
mongo:
|
||||
image: mongo:7.0.20
|
||||
image: mongo:8.0.11
|
||||
command: --replSet overleaf
|
||||
volumes:
|
||||
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@ -57,7 +57,7 @@ services:
|
|||
retries: 20
|
||||
|
||||
mongo:
|
||||
image: mongo:7.0.20
|
||||
image: mongo:8.0.11
|
||||
command: --replSet overleaf
|
||||
volumes:
|
||||
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@ -56,14 +56,8 @@ if (Settings.catchErrors) {
|
|||
// Create ./data/dumpFolder if needed
|
||||
FileWriter.ensureDumpFolderExists()
|
||||
|
||||
if (
|
||||
!Features.hasFeature('project-history-blobs') &&
|
||||
!Features.hasFeature('filestore')
|
||||
) {
|
||||
throw new Error(
|
||||
'invalid config: must enable either project-history-blobs (Settings.enableProjectHistoryBlobs=true) or enable filestore (Settings.disableFilestore=false)'
|
||||
)
|
||||
}
|
||||
// Validate combination of feature flags.
|
||||
Features.validateSettings()
|
||||
|
||||
// handle SIGTERM for graceful shutdown in kubernetes
|
||||
process.on('SIGTERM', function (signal) {
|
||||
|
|
|
@ -36,7 +36,22 @@ function send401WithChallenge(res) {
|
|||
function checkCredentials(userDetailsMap, user, password) {
|
||||
const expectedPassword = userDetailsMap.get(user)
|
||||
const userExists = userDetailsMap.has(user) && expectedPassword // user exists with a non-null password
|
||||
const isValid = userExists && tsscmp(expectedPassword, password)
|
||||
|
||||
let isValid = false
|
||||
if (userExists) {
|
||||
if (Array.isArray(expectedPassword)) {
|
||||
const isValidPrimary = Boolean(
|
||||
expectedPassword[0] && tsscmp(expectedPassword[0], password)
|
||||
)
|
||||
const isValidFallback = Boolean(
|
||||
expectedPassword[1] && tsscmp(expectedPassword[1], password)
|
||||
)
|
||||
isValid = isValidPrimary || isValidFallback
|
||||
} else {
|
||||
isValid = tsscmp(expectedPassword, password)
|
||||
}
|
||||
}
|
||||
|
||||
if (!isValid) {
|
||||
logger.err({ user }, 'invalid login details')
|
||||
}
|
||||
|
@ -82,6 +97,7 @@ const AuthenticationController = {
|
|||
analyticsId: user.analyticsId || user._id,
|
||||
alphaProgram: user.alphaProgram || undefined, // only store if set
|
||||
betaProgram: user.betaProgram || undefined, // only store if set
|
||||
externalAuth: user.externalAuth || false,
|
||||
}
|
||||
if (user.isAdmin) {
|
||||
lightUser.isAdmin = true
|
||||
|
|
|
@ -7,6 +7,7 @@ import logger from '@overleaf/logger'
|
|||
import _ from 'lodash'
|
||||
import { plainTextResponse } from '../../infrastructure/Response.js'
|
||||
import { expressify } from '@overleaf/promise-utils'
|
||||
import Modules from '../../infrastructure/Modules.js'
|
||||
|
||||
async function getDocument(req, res) {
|
||||
const { Project_id: projectId, doc_id: docId } = req.params
|
||||
|
@ -92,6 +93,9 @@ async function setDocument(req, res) {
|
|||
{ docId, projectId },
|
||||
'finished receiving set document request from api (docupdater)'
|
||||
)
|
||||
|
||||
await Modules.promises.hooks.fire('docModified', projectId, docId)
|
||||
|
||||
res.json(result)
|
||||
}
|
||||
|
||||
|
|
|
@ -8,7 +8,7 @@ function projectHistoryURLWithFilestoreFallback(
|
|||
) {
|
||||
const filestoreURL = `${Settings.apis.filestore.url}/project/${projectId}/file/${fileRef._id}?from=${origin}`
|
||||
// TODO: When this file is converted to ES modules we will be able to use Features.hasFeature('project-history-blobs'). Currently we can't stub the feature return value in tests.
|
||||
if (fileRef.hash && Settings.enableProjectHistoryBlobs) {
|
||||
if (fileRef.hash && Settings.filestoreMigrationLevel >= 1) {
|
||||
return {
|
||||
url: `${Settings.apis.project_history.url}/project/${historyId}/blob/${fileRef.hash}`,
|
||||
fallbackURL: filestoreURL,
|
||||
|
|
|
@ -72,6 +72,7 @@ async function getUserForPasswordResetToken(token) {
|
|||
'overleaf.id': 1,
|
||||
email: 1,
|
||||
must_reconfirm: 1,
|
||||
hashedPassword: 1,
|
||||
})
|
||||
|
||||
await assertUserPermissions(user, ['change-password'])
|
||||
|
|
|
@ -66,7 +66,7 @@ function uploadProject(req, res, next) {
|
|||
async function uploadFile(req, res, next) {
|
||||
const timer = new metrics.Timer('file-upload')
|
||||
const name = req.body.name
|
||||
const path = req.file?.path
|
||||
const { path } = req.file
|
||||
const projectId = req.params.Project_id
|
||||
const userId = SessionManager.getLoggedInUserId(req.session)
|
||||
let { folder_id: folderId } = req.query
|
||||
|
@ -162,8 +162,14 @@ function multerMiddleware(req, res, next) {
|
|||
.status(422)
|
||||
.json({ success: false, error: req.i18n.translate('file_too_large') })
|
||||
}
|
||||
|
||||
return next(err)
|
||||
if (err) return next(err)
|
||||
if (!req.file?.path) {
|
||||
logger.info({ req }, 'missing req.file.path on upload')
|
||||
return res
|
||||
.status(400)
|
||||
.json({ success: false, error: 'invalid_upload_request' })
|
||||
}
|
||||
next()
|
||||
})
|
||||
}
|
||||
|
||||
|
|
|
@ -518,4 +518,5 @@ module.exports = {
|
|||
expireDeletedUsersAfterDuration: expressify(expireDeletedUsersAfterDuration),
|
||||
ensureAffiliationMiddleware: expressify(ensureAffiliationMiddleware),
|
||||
ensureAffiliation,
|
||||
doLogout,
|
||||
}
|
||||
|
|
|
@ -52,10 +52,8 @@ async function settingsPage(req, res) {
|
|||
const reconfirmedViaSAML = _.get(req.session, ['saml', 'reconfirmed'])
|
||||
delete req.session.saml
|
||||
let shouldAllowEditingDetails = true
|
||||
if (Settings.ldap && Settings.ldap.updateUserDetailsOnLogin) {
|
||||
shouldAllowEditingDetails = false
|
||||
}
|
||||
if (Settings.saml && Settings.saml.updateUserDetailsOnLogin) {
|
||||
const externalAuth = req.user.externalAuth
|
||||
if (externalAuth && Settings[externalAuth].updateUserDetailsOnLogin) {
|
||||
shouldAllowEditingDetails = false
|
||||
}
|
||||
const oauthProviders = Settings.oauthProviders || {}
|
||||
|
|
|
@ -107,9 +107,9 @@ module.exports = function (webRouter, privateApiRouter, publicApiRouter) {
|
|||
|
||||
webRouter.use(function (req, res, next) {
|
||||
req.externalAuthenticationSystemUsed =
|
||||
Features.externalAuthenticationSystemUsed
|
||||
() => !!req?.user?.externalAuth
|
||||
res.locals.externalAuthenticationSystemUsed =
|
||||
Features.externalAuthenticationSystemUsed
|
||||
() => !!req?.user?.externalAuth
|
||||
req.hasFeature = res.locals.hasFeature = Features.hasFeature
|
||||
next()
|
||||
})
|
||||
|
|
|
@ -19,8 +19,7 @@ const trackChangesModuleAvailable =
|
|||
* @property {boolean | undefined} enableGithubSync
|
||||
* @property {boolean | undefined} enableGitBridge
|
||||
* @property {boolean | undefined} enableHomepage
|
||||
* @property {boolean | undefined} enableProjectHistoryBlobs
|
||||
* @property {boolean | undefined} disableFilestore
|
||||
* @property {number} filestoreMigrationLevel
|
||||
* @property {boolean | undefined} enableSaml
|
||||
* @property {boolean | undefined} ldap
|
||||
* @property {boolean | undefined} oauth
|
||||
|
@ -30,6 +29,14 @@ const trackChangesModuleAvailable =
|
|||
*/
|
||||
|
||||
const Features = {
|
||||
validateSettings() {
|
||||
if (![0, 1, 2].includes(Settings.filestoreMigrationLevel)) {
|
||||
throw new Error(
|
||||
`invalid OVERLEAF_FILESTORE_MIGRATION_LEVEL=${Settings.filestoreMigrationLevel}, expected 0, 1 or 2`
|
||||
)
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* @returns {boolean}
|
||||
*/
|
||||
|
@ -56,7 +63,7 @@ const Features = {
|
|||
case 'registration-page':
|
||||
return (
|
||||
!Features.externalAuthenticationSystemUsed() ||
|
||||
Boolean(Settings.overleaf)
|
||||
Boolean(Settings.overleaf) || Settings.oidc?.allowedOIDCEmailDomains
|
||||
)
|
||||
case 'registration':
|
||||
return Boolean(Settings.overleaf)
|
||||
|
@ -89,9 +96,9 @@ const Features = {
|
|||
Settings.enabledLinkedFileTypes.includes('url')
|
||||
)
|
||||
case 'project-history-blobs':
|
||||
return Boolean(Settings.enableProjectHistoryBlobs)
|
||||
return Settings.filestoreMigrationLevel > 0
|
||||
case 'filestore':
|
||||
return Boolean(Settings.disableFilestore) === false
|
||||
return Settings.filestoreMigrationLevel < 2
|
||||
case 'support':
|
||||
return supportModuleAvailable
|
||||
case 'symbol-palette':
|
||||
|
|
|
@ -150,8 +150,7 @@ async function linkedFileAgentsIncludes() {
|
|||
async function attachHooks() {
|
||||
for (const module of await modules()) {
|
||||
const { promises, ...hooks } = module.hooks || {}
|
||||
for (const hook in promises || {}) {
|
||||
const method = promises[hook]
|
||||
for (const [hook, method] of Object.entries(promises || {})) {
|
||||
attachHook(hook, method)
|
||||
}
|
||||
for (const hook in hooks || {}) {
|
||||
|
|
|
@ -217,6 +217,8 @@ async function initialize(webRouter, privateApiRouter, publicApiRouter) {
|
|||
CaptchaMiddleware.canSkipCaptcha
|
||||
)
|
||||
|
||||
await Modules.applyRouter(webRouter, privateApiRouter, publicApiRouter)
|
||||
|
||||
webRouter.get('/login', UserPagesController.loginPage)
|
||||
AuthenticationController.addEndpointToLoginWhitelist('/login')
|
||||
|
||||
|
@ -285,8 +287,6 @@ async function initialize(webRouter, privateApiRouter, publicApiRouter) {
|
|||
TokenAccessRouter.apply(webRouter)
|
||||
HistoryRouter.apply(webRouter, privateApiRouter)
|
||||
|
||||
await Modules.applyRouter(webRouter, privateApiRouter, publicApiRouter)
|
||||
|
||||
if (Settings.enableSubscriptions) {
|
||||
webRouter.get(
|
||||
'/user/bonus',
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
section.cookie-banner.hidden-print.hidden(aria-label='Cookie banner')
|
||||
.cookie-banner-content We only use cookies for essential purposes and to improve your experience on our site. You can find out more in our <a href="/legal#Cookies">cookie policy</a>.
|
||||
section.cookie-banner.hidden-print.hidden(aria-label=translate('cookie_banner'))
|
||||
.cookie-banner-content !{translate('cookie_banner_info', {}, [{ name: 'a', attrs: { href: '/legal#Cookies' }}])}
|
||||
.cookie-banner-actions
|
||||
button(
|
||||
type='button'
|
||||
class='btn btn-link btn-sm'
|
||||
data-ol-cookie-banner-set-consent='essential'
|
||||
) Essential cookies only
|
||||
) #{translate('essential_cookies_only')}
|
||||
button(
|
||||
type='button'
|
||||
class='btn btn-primary btn-sm'
|
||||
data-ol-cookie-banner-set-consent='all'
|
||||
) Accept all cookies
|
||||
) #{translate('accept_all_cookies')}
|
||||
|
|
|
@ -4,7 +4,7 @@ block vars
|
|||
- var suppressNavbar = true
|
||||
- var suppressFooter = true
|
||||
- var suppressSkipToContent = true
|
||||
- var suppressCookieBanner = true
|
||||
- var suppressPugCookieBanner = true
|
||||
|
||||
block content
|
||||
.content.content-alt
|
||||
|
|
|
@ -24,7 +24,7 @@ block body
|
|||
else
|
||||
include layout/fat-footer
|
||||
|
||||
if typeof suppressCookieBanner == 'undefined'
|
||||
if typeof suppressPugCookieBanner == 'undefined'
|
||||
include _cookie_banner
|
||||
|
||||
if bootstrapVersion === 5
|
||||
|
|
|
@ -69,5 +69,5 @@ block body
|
|||
else
|
||||
include layout/fat-footer-react-bootstrap-5
|
||||
|
||||
if typeof suppressCookieBanner === 'undefined'
|
||||
if typeof suppressPugCookieBanner === 'undefined'
|
||||
include _cookie_banner
|
||||
|
|
|
@ -27,7 +27,7 @@ block body
|
|||
else
|
||||
include layout/fat-footer-website-redesign
|
||||
|
||||
if typeof suppressCookieBanner == 'undefined'
|
||||
if typeof suppressPugCookieBanner == 'undefined'
|
||||
include _cookie_banner
|
||||
|
||||
block contactModal
|
||||
|
|
|
@ -2,7 +2,7 @@ extends ../../layout-marketing
|
|||
|
||||
block vars
|
||||
- var suppressFooter = true
|
||||
- var suppressCookieBanner = true
|
||||
- var suppressPugCookieBanner = true
|
||||
- var suppressSkipToContent = true
|
||||
|
||||
block content
|
||||
|
|
|
@ -7,7 +7,7 @@ block vars
|
|||
- var suppressNavbar = true
|
||||
- var suppressFooter = true
|
||||
- var suppressSkipToContent = true
|
||||
- var suppressCookieBanner = true
|
||||
- var suppressPugCookieBanner = true
|
||||
- metadata.robotsNoindexNofollow = true
|
||||
|
||||
block content
|
||||
|
|
|
@ -7,6 +7,7 @@ block vars
|
|||
- const suppressNavContentLinks = true
|
||||
- const suppressNavbar = true
|
||||
- const suppressFooter = true
|
||||
- const suppressPugCookieBanner = true
|
||||
|
||||
block append meta
|
||||
meta(
|
||||
|
|
|
@ -5,7 +5,7 @@ block entrypointVar
|
|||
|
||||
block vars
|
||||
- var suppressFooter = true
|
||||
- var suppressCookieBanner = true
|
||||
- var suppressPugCookieBanner = true
|
||||
- var suppressSkipToContent = true
|
||||
|
||||
block append meta
|
||||
|
|
|
@ -5,7 +5,7 @@ block entrypointVar
|
|||
|
||||
block vars
|
||||
- var suppressFooter = true
|
||||
- var suppressCookieBanner = true
|
||||
- var suppressPugCookieBanner = true
|
||||
- var suppressSkipToContent = true
|
||||
|
||||
block append meta
|
||||
|
|
|
@ -24,9 +24,9 @@ block content
|
|||
.form-group
|
||||
input.form-control(
|
||||
name='email'
|
||||
type='email'
|
||||
type=(settings.ldap && settings.ldap.enable) ? 'text' : 'email'
|
||||
required
|
||||
placeholder='email@example.com'
|
||||
placeholder=(settings.ldap && settings.ldap.enable) ? settings.ldap.placeholder : 'email@example.com'
|
||||
autofocus='true'
|
||||
)
|
||||
.form-group
|
||||
|
@ -44,3 +44,21 @@ block content
|
|||
if login_support_text
|
||||
hr
|
||||
p.text-center !{login_support_text}
|
||||
if settings.saml && settings.saml.enable
|
||||
.actions(style='margin-top: 30px;')
|
||||
a.button.btn-secondary.btn(
|
||||
href='/saml/login',
|
||||
style="width: 100%;"
|
||||
data-ol-disabled-inflight
|
||||
)
|
||||
span(data-ol-inflight="idle") #{settings.saml.identityServiceName}
|
||||
span(hidden data-ol-inflight="pending") #{translate("logging_in")}…
|
||||
if settings.oidc && settings.oidc.enable
|
||||
.actions(style='margin-top: 30px;')
|
||||
a.button.btn-secondary.btn(
|
||||
href='/oidc/login',
|
||||
style="width: 100%;"
|
||||
data-ol-disabled-inflight
|
||||
)
|
||||
span(data-ol-inflight="idle") #{settings.oidc.identityServiceName}
|
||||
span(hidden data-ol-inflight="pending") #{translate("logging_in")}…
|
||||
|
|
|
@ -50,7 +50,7 @@ block content
|
|||
+notification({ariaLive: 'assertive', type: 'error', className: 'mb-3', content: translate(error)})
|
||||
|
||||
div(data-ol-custom-form-message='no-password-allowed-due-to-sso' hidden)
|
||||
+notification({ariaLive: 'polite', type: 'error', className: 'mb-3', content: translate('you_cant_reset_password_due_to_sso', {}, [{name: 'a', attrs: {href: '/sso-login'}}])})
|
||||
+notification({ariaLive: 'polite', type: 'error', className: 'mb-3', content: translate('you_cant_reset_password_due_to_ldap_or_sso')})
|
||||
input(name='_csrf' type='hidden' value=csrfToken)
|
||||
.form-group.mb-3
|
||||
label.form-label(for='email') #{translate("email")}
|
||||
|
|
|
@ -440,6 +440,9 @@ module.exports = {
|
|||
','
|
||||
),
|
||||
|
||||
filestoreMigrationLevel:
|
||||
parseInt(process.env.OVERLEAF_FILESTORE_MIGRATION_LEVEL, 10) || 0,
|
||||
|
||||
// i18n
|
||||
// ------
|
||||
//
|
||||
|
@ -1030,6 +1033,9 @@ module.exports = {
|
|||
'launchpad',
|
||||
'server-ce-scripts',
|
||||
'user-activate',
|
||||
'authentication/ldap',
|
||||
'authentication/saml',
|
||||
'authentication/oidc',
|
||||
],
|
||||
viewIncludes: {},
|
||||
|
||||
|
@ -1056,6 +1062,20 @@ module.exports = {
|
|||
managedUsers: {
|
||||
enabled: false,
|
||||
},
|
||||
|
||||
oauthProviders: {
|
||||
...(process.env.EXTERNAL_AUTH && process.env.EXTERNAL_AUTH.includes('oidc') && {
|
||||
[process.env.OVERLEAF_OIDC_PROVIDER_ID || 'oidc']: {
|
||||
name: process.env.OVERLEAF_OIDC_PROVIDER_NAME || 'OIDC Provider',
|
||||
descriptionKey: process.env.OVERLEAF_OIDC_PROVIDER_DESCRIPTION,
|
||||
descriptionOptions: { link: process.env.OVERLEAF_OIDC_PROVIDER_INFO_LINK },
|
||||
hideWhenNotLinked: process.env.OVERLEAF_OIDC_PROVIDER_HIDE_NOT_LINKED ?
|
||||
process.env.OVERLEAF_OIDC_PROVIDER_HIDE_NOT_LINKED.toLowerCase() === 'true' : undefined,
|
||||
linkPath: '/oidc/login',
|
||||
},
|
||||
}),
|
||||
},
|
||||
|
||||
}
|
||||
|
||||
module.exports.mergeWith = function (overrides) {
|
||||
|
|
|
@ -95,7 +95,7 @@ services:
|
|||
image: redis:7.4.3
|
||||
|
||||
mongo:
|
||||
image: mongo:7.0.20
|
||||
image: mongo:8.0.11
|
||||
logging:
|
||||
driver: none
|
||||
command: --replSet overleaf
|
||||
|
|
|
@ -91,7 +91,7 @@ services:
|
|||
image: redis:7.4.3
|
||||
|
||||
mongo:
|
||||
image: mongo:7.0.20
|
||||
image: mongo:8.0.11
|
||||
command: --replSet overleaf
|
||||
volumes:
|
||||
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@ -35,6 +35,7 @@
|
|||
"about_to_remove_user_preamble": "",
|
||||
"about_to_trash_projects": "",
|
||||
"abstract": "",
|
||||
"accept_all_cookies": "",
|
||||
"accept_and_continue": "",
|
||||
"accept_change": "",
|
||||
"accept_change_error_description": "",
|
||||
|
@ -332,6 +333,8 @@
|
|||
"continue_to": "",
|
||||
"continue_using_free_features": "",
|
||||
"continue_with_free_plan": "",
|
||||
"cookie_banner": "",
|
||||
"cookie_banner_info": "",
|
||||
"copied": "",
|
||||
"copy": "",
|
||||
"copy_code": "",
|
||||
|
@ -544,6 +547,7 @@
|
|||
"error_opening_document_detail": "",
|
||||
"error_performing_request": "",
|
||||
"error_processing_file": "",
|
||||
"essential_cookies_only": "",
|
||||
"example_project": "",
|
||||
"existing_plan_active_until_term_end": "",
|
||||
"expand": "",
|
||||
|
@ -863,6 +867,7 @@
|
|||
"invalid_password_too_similar": "",
|
||||
"invalid_regular_expression": "",
|
||||
"invalid_request": "",
|
||||
"invalid_upload_request": "",
|
||||
"invite": "",
|
||||
"invite_expired": "",
|
||||
"invite_more_collabs": "",
|
||||
|
@ -2140,6 +2145,7 @@
|
|||
"you_can_select_or_invite_collaborator": "",
|
||||
"you_can_select_or_invite_collaborator_plural": "",
|
||||
"you_can_still_use_your_premium_features": "",
|
||||
"you_cant_add_or_change_password_due_to_ldap_or_sso": "",
|
||||
"you_cant_add_or_change_password_due_to_sso": "",
|
||||
"you_cant_join_this_group_subscription": "",
|
||||
"you_dont_have_any_add_ons_on_your_account": "",
|
||||
|
|
|
@ -1,53 +0,0 @@
|
|||
import getMeta from '@/utils/meta'
|
||||
|
||||
function loadGA() {
|
||||
if (window.olLoadGA) {
|
||||
window.olLoadGA()
|
||||
}
|
||||
}
|
||||
|
||||
function setConsent(value) {
|
||||
document.querySelector('.cookie-banner').classList.add('hidden')
|
||||
const cookieDomain = getMeta('ol-ExposedSettings').cookieDomain
|
||||
const oneYearInSeconds = 60 * 60 * 24 * 365
|
||||
const cookieAttributes =
|
||||
'; path=/' +
|
||||
'; domain=' +
|
||||
cookieDomain +
|
||||
'; max-age=' +
|
||||
oneYearInSeconds +
|
||||
'; SameSite=Lax; Secure'
|
||||
if (value === 'all') {
|
||||
document.cookie = 'oa=1' + cookieAttributes
|
||||
loadGA()
|
||||
window.dispatchEvent(new CustomEvent('cookie-consent', { detail: true }))
|
||||
} else {
|
||||
document.cookie = 'oa=0' + cookieAttributes
|
||||
window.dispatchEvent(new CustomEvent('cookie-consent', { detail: false }))
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
getMeta('ol-ExposedSettings').gaToken ||
|
||||
getMeta('ol-ExposedSettings').gaTokenV4 ||
|
||||
getMeta('ol-ExposedSettings').propensityId ||
|
||||
getMeta('ol-ExposedSettings').hotjarId
|
||||
) {
|
||||
document
|
||||
.querySelectorAll('[data-ol-cookie-banner-set-consent]')
|
||||
.forEach(el => {
|
||||
el.addEventListener('click', function (e) {
|
||||
e.preventDefault()
|
||||
const consentType = el.getAttribute('data-ol-cookie-banner-set-consent')
|
||||
setConsent(consentType)
|
||||
})
|
||||
})
|
||||
|
||||
const oaCookie = document.cookie.split('; ').find(c => c.startsWith('oa='))
|
||||
if (!oaCookie) {
|
||||
const cookieBannerEl = document.querySelector('.cookie-banner')
|
||||
if (cookieBannerEl) {
|
||||
cookieBannerEl.classList.remove('hidden')
|
||||
}
|
||||
}
|
||||
}
|
32
services/web/frontend/js/features/cookie-banner/index.ts
Normal file
32
services/web/frontend/js/features/cookie-banner/index.ts
Normal file
|
@ -0,0 +1,32 @@
|
|||
import {
|
||||
CookieConsentValue,
|
||||
cookieBannerRequired,
|
||||
hasMadeCookieChoice,
|
||||
setConsent,
|
||||
} from '@/features/cookie-banner/utils'
|
||||
|
||||
function toggleCookieBanner(hidden: boolean) {
|
||||
const cookieBannerEl = document.querySelector('.cookie-banner')
|
||||
if (cookieBannerEl) {
|
||||
cookieBannerEl.classList.toggle('hidden', hidden)
|
||||
}
|
||||
}
|
||||
|
||||
if (cookieBannerRequired()) {
|
||||
document
|
||||
.querySelectorAll('[data-ol-cookie-banner-set-consent]')
|
||||
.forEach(el => {
|
||||
el.addEventListener('click', function (e) {
|
||||
e.preventDefault()
|
||||
toggleCookieBanner(true)
|
||||
const consentType = el.getAttribute(
|
||||
'data-ol-cookie-banner-set-consent'
|
||||
) as CookieConsentValue | null
|
||||
setConsent(consentType)
|
||||
})
|
||||
})
|
||||
|
||||
if (!hasMadeCookieChoice()) {
|
||||
toggleCookieBanner(false)
|
||||
}
|
||||
}
|
43
services/web/frontend/js/features/cookie-banner/utils.ts
Normal file
43
services/web/frontend/js/features/cookie-banner/utils.ts
Normal file
|
@ -0,0 +1,43 @@
|
|||
import getMeta from '@/utils/meta'
|
||||
|
||||
export type CookieConsentValue = 'all' | 'essential'
|
||||
|
||||
function loadGA() {
|
||||
if (window.olLoadGA) {
|
||||
window.olLoadGA()
|
||||
}
|
||||
}
|
||||
|
||||
export function setConsent(value: CookieConsentValue | null) {
|
||||
const cookieDomain = getMeta('ol-ExposedSettings').cookieDomain
|
||||
const oneYearInSeconds = 60 * 60 * 24 * 365
|
||||
const cookieAttributes =
|
||||
'; path=/' +
|
||||
'; domain=' +
|
||||
cookieDomain +
|
||||
'; max-age=' +
|
||||
oneYearInSeconds +
|
||||
'; SameSite=Lax; Secure'
|
||||
if (value === 'all') {
|
||||
document.cookie = 'oa=1' + cookieAttributes
|
||||
loadGA()
|
||||
window.dispatchEvent(new CustomEvent('cookie-consent', { detail: true }))
|
||||
} else {
|
||||
document.cookie = 'oa=0' + cookieAttributes
|
||||
window.dispatchEvent(new CustomEvent('cookie-consent', { detail: false }))
|
||||
}
|
||||
}
|
||||
|
||||
export function cookieBannerRequired() {
|
||||
const exposedSettings = getMeta('ol-ExposedSettings')
|
||||
return Boolean(
|
||||
exposedSettings.gaToken ||
|
||||
exposedSettings.gaTokenV4 ||
|
||||
exposedSettings.propensityId ||
|
||||
exposedSettings.hotjarId
|
||||
)
|
||||
}
|
||||
|
||||
export function hasMadeCookieChoice() {
|
||||
return document.cookie.split('; ').some(c => c.startsWith('oa='))
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
import { useTranslation } from 'react-i18next'
|
||||
import { useTranslation, Trans } from 'react-i18next'
|
||||
import { FetchError } from '../../../../infrastructure/fetch-json'
|
||||
import RedirectToLogin from './redirect-to-login'
|
||||
import {
|
||||
|
@ -7,6 +7,7 @@ import {
|
|||
InvalidFilenameError,
|
||||
} from '../../errors'
|
||||
import DangerMessage from './danger-message'
|
||||
import getMeta from '@/utils/meta'
|
||||
|
||||
// TODO: Update the error type when we properly type FileTreeActionableContext
|
||||
export default function ErrorMessage({
|
||||
|
@ -15,6 +16,7 @@ export default function ErrorMessage({
|
|||
error: string | Record<string, any>
|
||||
}) {
|
||||
const { t } = useTranslation()
|
||||
const { isOverleaf } = getMeta('ol-ExposedSettings')
|
||||
const fileNameLimit = 150
|
||||
|
||||
// the error is a string
|
||||
|
@ -46,6 +48,22 @@ export default function ErrorMessage({
|
|||
</DangerMessage>
|
||||
)
|
||||
|
||||
case 'invalid_upload_request':
|
||||
if (!isOverleaf) {
|
||||
return (
|
||||
<DangerMessage>{t('generic_something_went_wrong')}</DangerMessage>
|
||||
)
|
||||
}
|
||||
return (
|
||||
<DangerMessage>
|
||||
<Trans
|
||||
i18nKey="invalid_upload_request"
|
||||
// eslint-disable-next-line jsx-a11y/anchor-has-content, react/jsx-key
|
||||
components={[<a href="/contact" target="_blank" />]}
|
||||
/>
|
||||
</DangerMessage>
|
||||
)
|
||||
|
||||
case 'duplicate_file_name':
|
||||
return (
|
||||
<DangerMessage>
|
||||
|
|
|
@ -1,15 +1,14 @@
|
|||
import { MessageProps } from '@/features/chat/components/message'
|
||||
import { User } from '../../../../../../types/user'
|
||||
import { getHueForUserId } from '@/shared/utils/colors'
|
||||
import {
|
||||
getBackgroundColorForUserId,
|
||||
hslStringToLuminance,
|
||||
} from '@/shared/utils/colors'
|
||||
import MessageContent from '@/features/chat/components/message-content'
|
||||
import classNames from 'classnames'
|
||||
import MaterialIcon from '@/shared/components/material-icon'
|
||||
import { t } from 'i18next'
|
||||
|
||||
function hue(user?: User) {
|
||||
return user ? getHueForUserId(user.id) : 0
|
||||
}
|
||||
|
||||
function getAvatarStyle(user?: User) {
|
||||
if (!user?.id) {
|
||||
// Deleted user
|
||||
|
@ -20,9 +19,15 @@ function getAvatarStyle(user?: User) {
|
|||
}
|
||||
}
|
||||
|
||||
const backgroundColor = getBackgroundColorForUserId(user.id)
|
||||
|
||||
return {
|
||||
borderColor: `hsl(${hue(user)}, 85%, 40%)`,
|
||||
backgroundColor: `hsl(${hue(user)}, 85%, 40%`,
|
||||
borderColor: backgroundColor,
|
||||
backgroundColor,
|
||||
color:
|
||||
hslStringToLuminance(backgroundColor) < 0.5
|
||||
? 'var(--content-primary-dark)'
|
||||
: 'var(--content-primary)',
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -7,7 +7,11 @@ import {
|
|||
DropdownToggle,
|
||||
} from '@/features/ui/components/bootstrap-5/dropdown-menu'
|
||||
import OLTooltip from '@/features/ui/components/ol/ol-tooltip'
|
||||
import { getBackgroundColorForUserId } from '@/shared/utils/colors'
|
||||
import {
|
||||
getBackgroundColorForUserId,
|
||||
hslStringToLuminance,
|
||||
} from '@/shared/utils/colors'
|
||||
import classNames from 'classnames'
|
||||
import { useCallback, useMemo } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
|
||||
|
@ -86,9 +90,16 @@ const OnlineUserWidget = ({
|
|||
|
||||
const OnlineUserCircle = ({ user }: { user: OnlineUser }) => {
|
||||
const backgroundColor = getBackgroundColorForUserId(user.user_id)
|
||||
const luminance = hslStringToLuminance(backgroundColor)
|
||||
const [character] = [...user.name]
|
||||
return (
|
||||
<span className="online-user-circle" style={{ backgroundColor }}>
|
||||
<span
|
||||
className={classNames('online-user-circle', {
|
||||
'online-user-circle-light-font': luminance < 0.5,
|
||||
'online-user-circle-dark-font': luminance >= 0.5,
|
||||
})}
|
||||
style={{ backgroundColor }}
|
||||
>
|
||||
{character}
|
||||
</span>
|
||||
)
|
||||
|
|
|
@ -20,6 +20,7 @@ import Footer from '@/features/ui/components/bootstrap-5/footer/footer'
|
|||
import SidebarDsNav from '@/features/project-list/components/sidebar/sidebar-ds-nav'
|
||||
import SystemMessages from '@/shared/components/system-messages'
|
||||
import overleafLogo from '@/shared/svgs/overleaf-a-ds-solution-mallard.svg'
|
||||
import CookieBanner from '@/shared/components/cookie-banner'
|
||||
|
||||
export function ProjectListDsNav() {
|
||||
const navbarProps = getMeta('ol-navbar')
|
||||
|
@ -125,6 +126,7 @@ export function ProjectListDsNav() {
|
|||
</div>
|
||||
<Footer {...footerProps} />
|
||||
</div>
|
||||
<CookieBanner />
|
||||
</div>
|
||||
</main>
|
||||
</div>
|
||||
|
|
|
@ -18,6 +18,7 @@ import Footer from '@/features/ui/components/bootstrap-5/footer/footer'
|
|||
import WelcomePageContent from '@/features/project-list/components/welcome-page-content'
|
||||
import { ProjectListDsNav } from '@/features/project-list/components/project-list-ds-nav'
|
||||
import { DsNavStyleProvider } from '@/features/project-list/components/use-is-ds-nav'
|
||||
import CookieBanner from '@/shared/components/cookie-banner'
|
||||
|
||||
function ProjectListRoot() {
|
||||
const { isReady } = useWaitForI18n()
|
||||
|
@ -88,9 +89,12 @@ function ProjectListPageContent() {
|
|||
|
||||
if (totalProjectsCount === 0) {
|
||||
return (
|
||||
<DefaultPageContentWrapper>
|
||||
<WelcomePageContent />
|
||||
</DefaultPageContentWrapper>
|
||||
<>
|
||||
<DefaultPageContentWrapper>
|
||||
<WelcomePageContent />
|
||||
</DefaultPageContentWrapper>
|
||||
<CookieBanner />
|
||||
</>
|
||||
)
|
||||
}
|
||||
return (
|
||||
|
|
|
@ -204,7 +204,8 @@ function SSOLinkingWidgetContainer({
|
|||
const { t } = useTranslation()
|
||||
const { unlink } = useSSOContext()
|
||||
|
||||
let description = ''
|
||||
let description = subscription.provider.descriptionKey ||
|
||||
`${t('login_with_service', { service: subscription.provider.name, })}.`
|
||||
switch (subscription.providerId) {
|
||||
case 'collabratec':
|
||||
description = t('linked_collabratec_description')
|
||||
|
|
|
@ -4,6 +4,7 @@ import { FetchError } from '../../../../infrastructure/fetch-json'
|
|||
import IEEELogo from '../../../../shared/svgs/ieee-logo'
|
||||
import GoogleLogo from '../../../../shared/svgs/google-logo'
|
||||
import OrcidLogo from '../../../../shared/svgs/orcid-logo'
|
||||
import OpenIDLogo from '../../../../shared/svgs/openid-logo'
|
||||
import LinkingStatus from './status'
|
||||
import OLButton from '@/features/ui/components/ol/ol-button'
|
||||
import OLModal, {
|
||||
|
@ -17,6 +18,7 @@ const providerLogos: { readonly [p: string]: JSX.Element } = {
|
|||
collabratec: <IEEELogo />,
|
||||
google: <GoogleLogo />,
|
||||
orcid: <OrcidLogo />,
|
||||
oidc: <OpenIDLogo />,
|
||||
}
|
||||
|
||||
type SSOLinkingWidgetProps = {
|
||||
|
@ -66,7 +68,7 @@ export function SSOLinkingWidget({
|
|||
|
||||
return (
|
||||
<div className="settings-widget-container">
|
||||
<div>{providerLogos[providerId]}</div>
|
||||
<div>{providerLogos[providerId] || providerLogos['oidc']}</div>
|
||||
<div className="description-container">
|
||||
<div className="title-row">
|
||||
<h4 id={providerId}>{title}</h4>
|
||||
|
|
|
@ -39,11 +39,7 @@ function CanOnlyLogInThroughSSO() {
|
|||
return (
|
||||
<p>
|
||||
<Trans
|
||||
i18nKey="you_cant_add_or_change_password_due_to_sso"
|
||||
components={[
|
||||
// eslint-disable-next-line react/jsx-key, jsx-a11y/anchor-has-content
|
||||
<a href="/learn/how-to/Logging_in_with_Group_single_sign-on" />,
|
||||
]}
|
||||
i18nKey="you_cant_add_or_change_password_due_to_ldap_or_sso"
|
||||
/>
|
||||
</p>
|
||||
)
|
||||
|
|
58
services/web/frontend/js/shared/components/cookie-banner.tsx
Normal file
58
services/web/frontend/js/shared/components/cookie-banner.tsx
Normal file
|
@ -0,0 +1,58 @@
|
|||
import OLButton from '@/features/ui/components/ol/ol-button'
|
||||
import { Trans, useTranslation } from 'react-i18next'
|
||||
import React, { useState } from 'react'
|
||||
import {
|
||||
CookieConsentValue,
|
||||
cookieBannerRequired,
|
||||
hasMadeCookieChoice,
|
||||
setConsent,
|
||||
} from '@/features/cookie-banner/utils'
|
||||
|
||||
function CookieBanner() {
|
||||
const { t } = useTranslation()
|
||||
const [hidden, setHidden] = useState(
|
||||
() => !cookieBannerRequired() || hasMadeCookieChoice()
|
||||
)
|
||||
|
||||
function makeCookieChoice(value: CookieConsentValue) {
|
||||
setConsent(value)
|
||||
setHidden(true)
|
||||
}
|
||||
|
||||
if (hidden) {
|
||||
return null
|
||||
}
|
||||
|
||||
return (
|
||||
<section
|
||||
className="cookie-banner hidden-print"
|
||||
aria-label={t('cookie_banner')}
|
||||
>
|
||||
<div className="cookie-banner-content">
|
||||
<Trans
|
||||
i18nKey="cookie_banner_info"
|
||||
// eslint-disable-next-line react/jsx-key, jsx-a11y/anchor-has-content
|
||||
components={[<a href="/legal#Cookies" />]}
|
||||
/>
|
||||
</div>
|
||||
<div className="cookie-banner-actions">
|
||||
<OLButton
|
||||
variant="link"
|
||||
size="sm"
|
||||
onClick={() => makeCookieChoice('essential')}
|
||||
>
|
||||
{t('essential_cookies_only')}
|
||||
</OLButton>
|
||||
<OLButton
|
||||
variant="primary"
|
||||
size="sm"
|
||||
onClick={() => makeCookieChoice('all')}
|
||||
>
|
||||
{t('accept_all_cookies')}
|
||||
</OLButton>
|
||||
</div>
|
||||
</section>
|
||||
)
|
||||
}
|
||||
|
||||
export default CookieBanner
|
27
services/web/frontend/js/shared/svgs/openid-logo.jsx
Normal file
27
services/web/frontend/js/shared/svgs/openid-logo.jsx
Normal file
|
@ -0,0 +1,27 @@
|
|||
function OpenIDLogo() {
|
||||
return (
|
||||
<svg
|
||||
width="40"
|
||||
height="40"
|
||||
viewBox="0 0 40 40"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<rect width="40" height="40" fill="white" />
|
||||
<path
|
||||
d="M18.185415 36.042565 23.298193 32.35627 23.060446 3.090316 18.185415 6.8918455Z"
|
||||
fill="#ff8e00"
|
||||
/>
|
||||
<path
|
||||
d="M18.246064 36.042565C-0.37463741 32.997945 -1.0248032 15.054095 18.13083 11.143396l0.05944 3.322396 c -13.3672163 2.225847 -11.6629563 14.187201 0 15.92785l0.05944 3.127104Z"
|
||||
fill="#626262"
|
||||
/>
|
||||
<path
|
||||
d="M23.219348 14.720521c2.279219 0.01577 4.262468 1.057732 6.237225 2.117891l-2.917255 2.176115h9.317022l0.05701 -6.371868 -2.917255 2.176115C30.03396 13.32315 27.308358 11.530342 23.169615 11.496378Z"
|
||||
fill="#626262"
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export default OpenIDLogo
|
||||
|
|
@ -34,6 +34,51 @@ export function getBackgroundColorForUserId(userId?: string) {
|
|||
return `hsl(${getHueForUserId(userId)}, 70%, 50%)`
|
||||
}
|
||||
|
||||
export function hslStringToLuminance(hslString: string): number {
|
||||
// First extract the individual components from the HSL string
|
||||
const hslSplit = hslString.slice(4).split(')')[0].split(',')
|
||||
|
||||
const h = Number(hslSplit[0])
|
||||
const s = Number(hslSplit[1].slice(0, -1)) / 100
|
||||
const l = Number(hslSplit[2].slice(0, -1)) / 100
|
||||
|
||||
// Then we need to convert HSL to RGB
|
||||
const c = (1 - Math.abs(2 * l - 1)) * s
|
||||
const x = c * (1 - Math.abs(((h / 60) % 2) - 1))
|
||||
const m = l - c / 2
|
||||
let r = 0
|
||||
let g = 0
|
||||
let b = 0
|
||||
if (h >= 0 && h < 60) {
|
||||
r = c + m
|
||||
g = x + m
|
||||
b = m
|
||||
} else if (h >= 60 && h < 120) {
|
||||
r = x + m
|
||||
g = c + m
|
||||
b = m
|
||||
} else if (h >= 120 && h < 180) {
|
||||
r = m
|
||||
g = c + m
|
||||
b = x + m
|
||||
} else if (h >= 180 && h < 240) {
|
||||
r = m
|
||||
g = x + m
|
||||
b = c + m
|
||||
} else if (h >= 240 && h < 300) {
|
||||
r = x + m
|
||||
g = m
|
||||
b = c + m
|
||||
} else if (h >= 300 && h < 360) {
|
||||
r = c + m
|
||||
g = m
|
||||
b = x + m
|
||||
}
|
||||
|
||||
// Finally we calculate the luminance
|
||||
return 0.2126 * r + 0.7152 * g + 0.0722 * b
|
||||
}
|
||||
|
||||
const cachedHues = new Map()
|
||||
|
||||
export function getHueForId(id: string) {
|
||||
|
|
|
@ -124,4 +124,12 @@
|
|||
box-sizing: border-box;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.online-user-circle-light-font {
|
||||
color: var(--content-primary-dark);
|
||||
}
|
||||
|
||||
.online-user-circle-dark-font {
|
||||
color: var(--content-primary);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -524,6 +524,10 @@ $z-index-group-member-picker-list: 1;
|
|||
&[data-ol-plans-new-group-member-picker-button='group-all'] {
|
||||
height: $group-member-picker-top-height;
|
||||
}
|
||||
|
||||
.material-symbols {
|
||||
pointer-events: none;
|
||||
}
|
||||
}
|
||||
|
||||
ul.plans-new-group-member-picker-list {
|
||||
|
|
|
@ -255,6 +255,12 @@
|
|||
display: flex;
|
||||
flex-direction: column;
|
||||
|
||||
> * {
|
||||
@include media-breakpoint-up(md) {
|
||||
border-left: 1px solid var(--border-divider);
|
||||
}
|
||||
}
|
||||
|
||||
.project-ds-nav-content {
|
||||
flex-grow: 1;
|
||||
overflow-y: auto;
|
||||
|
@ -263,10 +269,20 @@
|
|||
|
||||
@include media-breakpoint-up(md) {
|
||||
border-top-left-radius: var(--border-radius-large);
|
||||
border-left: 1px solid var(--border-divider);
|
||||
border-top: 1px solid var(--border-divider);
|
||||
}
|
||||
}
|
||||
|
||||
.cookie-banner {
|
||||
position: static;
|
||||
background-color: var(--bg-light-primary);
|
||||
|
||||
// Remove the parts of the shadow that stick out of the sides
|
||||
clip-path: inset(-13px 0 0 0);
|
||||
|
||||
// Prevent the cookie banner being overlaid on top of the navigation
|
||||
z-index: auto;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -38,6 +38,7 @@
|
|||
"about_to_trash_projects": "You are about to trash the following projects:",
|
||||
"abstract": "Abstract",
|
||||
"accept": "Accept",
|
||||
"accept_all_cookies": "Accept all cookies",
|
||||
"accept_and_continue": "Accept and continue",
|
||||
"accept_change": "Accept change",
|
||||
"accept_change_error_description": "There was an error accepting a track change. Please try again in a few moments.",
|
||||
|
@ -163,6 +164,7 @@
|
|||
"already_have_sl_account": "Already have an __appName__ account?",
|
||||
"also": "Also",
|
||||
"alternatively_create_new_institution_account": "Alternatively, you can create a <b>new account</b> with your institution email (<b>__email__</b>) by clicking <b>__clickText__</b>.",
|
||||
"alternatively_create_local_admin_account": "Alternatively, you can create __appName__ local admin account.",
|
||||
"an_email_has_already_been_sent_to": "An email has already been sent to <0>__email__</0>. Please wait and try again later.",
|
||||
"an_error_occured_while_restoring_project": "An error occured while restoring the project",
|
||||
"an_error_occurred_when_verifying_the_coupon_code": "An error occurred when verifying the coupon code",
|
||||
|
@ -433,6 +435,8 @@
|
|||
"continue_using_free_features": "Continue using our free features",
|
||||
"continue_with_free_plan": "Continue with free plan",
|
||||
"continue_with_service": "Continue with __service__",
|
||||
"cookie_banner": "Cookie banner",
|
||||
"cookie_banner_info": "We only use cookies for essential purposes and to improve your experience on our site. You can find out more in our <0>cookie policy</0>.",
|
||||
"copied": "Copied",
|
||||
"copy": "Copy",
|
||||
"copy_code": "Copy code",
|
||||
|
@ -612,6 +616,7 @@
|
|||
"dropbox_synced": "Overleaf and Dropbox have processed all updates. Note that your local Dropbox might still be synchronizing",
|
||||
"dropbox_unlinked_because_access_denied": "Your Dropbox account has been unlinked because the Dropbox service rejected your stored credentials. Please relink your Dropbox account to continue using it with Overleaf.",
|
||||
"dropbox_unlinked_because_full": "Your Dropbox account has been unlinked because it is full, and we can no longer send updates to it. Please free up some space and relink your Dropbox account to continue using it with Overleaf.",
|
||||
"dropbox_unlinked_because_suspended": "We’ve unlinked your Dropbox account because it’s been suspended by Dropbox. You’ll be able to relink once you’ve resolved the issue with Dropbox.",
|
||||
"dropbox_unlinked_premium_feature": "<0>Your Dropbox account has been unlinked</0> because Dropbox Sync is a premium feature that you had through an institutional license.",
|
||||
"due_date": "Due __date__",
|
||||
"due_today": "Due today",
|
||||
|
@ -700,6 +705,7 @@
|
|||
"error_performing_request": "An error has occurred while performing your request.",
|
||||
"error_processing_file": "Sorry, something went wrong processing this file. Please try again.",
|
||||
"es": "Spanish",
|
||||
"essential_cookies_only": "Essential cookies only",
|
||||
"estimated_number_of_overleaf_users": "Estimated number of __appName__ users",
|
||||
"every": "per",
|
||||
"everything_in_free_plus": "Everything in Free, plus…",
|
||||
|
@ -1107,6 +1113,7 @@
|
|||
"invalid_password_too_similar": "Password is too similar to parts of email address",
|
||||
"invalid_regular_expression": "Invalid regular expression",
|
||||
"invalid_request": "Invalid Request. Please correct the data and try again.",
|
||||
"invalid_upload_request": "The upload failed. If the problem persists, <0>let us know</0>.",
|
||||
"invalid_zip_file": "Invalid zip file",
|
||||
"invite": "Invite",
|
||||
"invite_expired": "The invite may have expired",
|
||||
|
@ -1250,6 +1257,7 @@
|
|||
"loading_prices": "loading prices",
|
||||
"loading_recent_github_commits": "Loading recent commits",
|
||||
"loading_writefull": "Loading Writefull",
|
||||
"local_account": "Local account",
|
||||
"log_entry_description": "Log entry with level: __level__",
|
||||
"log_entry_maximum_entries": "Maximum log entries limit hit",
|
||||
"log_entry_maximum_entries_enable_stop_on_first_error": "Try to fix the first error and recompile. Often one error causes many later error messages. You can <0>Enable “Stop on first error”</0> to focus on fixing errors. We recommend fixing errors as soon as possible; letting them accumulate may lead to hard-to-debug and fatal errors. <1>Learn more</1>",
|
||||
|
@ -2695,8 +2703,10 @@
|
|||
"you_can_select_or_invite_collaborator": "You can select or invite __count__ collaborator on your current plan. Upgrade to add more editors or reviewers.",
|
||||
"you_can_select_or_invite_collaborator_plural": "You can select or invite __count__ collaborators on your current plan. Upgrade to add more editors or reviewers.",
|
||||
"you_can_still_use_your_premium_features": "You can still use your premium features until the pause becomes active.",
|
||||
"you_cant_add_or_change_password_due_to_ldap_or_sso": "You can’t add or change your password because your group or organization uses LDAP or SSO.",
|
||||
"you_cant_add_or_change_password_due_to_sso": "You can’t add or change your password because your group or organization uses <0>single sign-on (SSO)</0>.",
|
||||
"you_cant_join_this_group_subscription": "You can’t join this group subscription",
|
||||
"you_cant_reset_password_due_to_ldap_or_sso": "You can’t reset your password because your group or organization uses LDAP or SSO. Contact your system administrator.",
|
||||
"you_cant_reset_password_due_to_sso": "You can’t reset your password because your group or organization uses SSO. <0>Log in with SSO</0>.",
|
||||
"you_dont_have_any_add_ons_on_your_account": "You don’t have any add-ons on your account.",
|
||||
"you_dont_have_any_repositories": "You don’t have any repositories",
|
||||
|
|
|
@ -0,0 +1,112 @@
|
|||
import logger from '@overleaf/logger'
|
||||
import passport from 'passport'
|
||||
import EmailHelper from '../../../../../app/src/Features/Helpers/EmailHelper.js'
|
||||
import { handleAuthenticateErrors } from '../../../../../app/src/Features/Authentication/AuthenticationErrors.js'
|
||||
import AuthenticationController from '../../../../../app/src/Features/Authentication/AuthenticationController.js'
|
||||
import LDAPAuthenticationManager from './LDAPAuthenticationManager.mjs'
|
||||
|
||||
const LDAPAuthenticationController = {
|
||||
passportLogin(req, res, next) {
|
||||
// This function is middleware which wraps the passport.authenticate middleware,
|
||||
// so we can send back our custom `{message: {text: "", type: ""}}` responses on failure,
|
||||
// and send a `{redir: ""}` response on success
|
||||
passport.authenticate(
|
||||
'ldapauth',
|
||||
{ keepSessionInfo: true },
|
||||
async function (err, user, info, status) {
|
||||
if (err) { //we cannot be here as long as errors are treated as fails
|
||||
return next(err)
|
||||
}
|
||||
if (user) {
|
||||
// `user` is either a user object or false
|
||||
AuthenticationController.setAuditInfo(req, {
|
||||
method: 'LDAP password login',
|
||||
})
|
||||
|
||||
try {
|
||||
await AuthenticationController.promises.finishLogin(user, req, res)
|
||||
res.status(200)
|
||||
return
|
||||
} catch (err) {
|
||||
return next(err)
|
||||
}
|
||||
} else {
|
||||
if (status != 401) {
|
||||
logger.warn(status, 'LDAP: ' + info.message)
|
||||
}
|
||||
if (EmailHelper.parseEmail(req.body.email)) return next() //Try local authentication
|
||||
if (info.redir != null) {
|
||||
return res.json({ redir: info.redir })
|
||||
} else {
|
||||
res.status(status || info.status || 401)
|
||||
delete info.status
|
||||
info.type = 'error'
|
||||
info.key = 'invalid-password-retry-or-reset'
|
||||
const body = { message: info }
|
||||
const { errorReason } = info
|
||||
if (errorReason) {
|
||||
body.errorReason = errorReason
|
||||
delete info.errorReason
|
||||
}
|
||||
return res.json(body)
|
||||
}
|
||||
}
|
||||
}
|
||||
)(req, res, next)
|
||||
},
|
||||
async doPassportLogin(req, profile, done) {
|
||||
let user, info
|
||||
try {
|
||||
;({ user, info } = await LDAPAuthenticationController._doPassportLogin(
|
||||
req,
|
||||
profile
|
||||
))
|
||||
} catch (error) {
|
||||
return done(error)
|
||||
}
|
||||
return done(undefined, user, info)
|
||||
},
|
||||
/**
 * Core LDAP login step: resolve (or create) the Overleaf user backing the
 * already-LDAP-authenticated `profile` and decide the login outcome.
 *
 * Returns `{ user, info }` in the shape passport's verify callback expects:
 *  - `{ user, info: undefined }` on success (with `user.externalAuth` tagged),
 *  - `{ user: false, info: {...} }` on failure, where `info` carries the
 *    message/status to render.
 */
async _doPassportLogin(req, profile) {
  const { fromKnownDevice } = AuthenticationController.getAuditInfo(req)
  const auditLog = {
    ipAddress: req.ip,
    info: { method: 'LDAP password login', fromKnownDevice },
  }

  // NOTE(review): `isPasswordReused` is declared but never assigned here —
  // presumably left over from the upstream local-login flow.
  let user, isPasswordReused
  try {
    user = await LDAPAuthenticationManager.promises.findOrCreateUser(profile, auditLog)
  } catch (error) {
    // Translates known authentication errors (e.g. parallel login) into the
    // standard `{ text, type, status }` info object.
    return {
      user: false,
      info: handleAuthenticateErrors(error, req),
    }
  }
  if (user && AuthenticationController.captchaRequiredForLogin(req, user)) {
    // Captcha gate: reject the login until the captcha is solved.
    return {
      user: false,
      info: {
        text: req.i18n.translate('cannot_verify_user_not_robot'),
        type: 'error',
        errorReason: 'cannot_verify_user_not_robot',
        status: 400,
      },
    }
  } else if (user) {
    // Tag the session user so policies (e.g. change-password) can tell
    // LDAP-backed accounts apart from local ones.
    user.externalAuth = 'ldap'
    return { user, info: undefined }
  } else { //we cannot be here, something is terribly wrong
    logger.debug({ email : profile.mail }, 'failed LDAP log in')
    return {
      user: false,
      info: {
        type: 'error',
        text: 'Unknown error',
        status: 500,
      },
    }
  }
},
|
||||
}
|
||||
|
||||
export default LDAPAuthenticationController
|
|
@ -0,0 +1,75 @@
|
|||
import Settings from '@overleaf/settings'
|
||||
import { callbackify } from '@overleaf/promise-utils'
|
||||
import UserCreator from '../../../../../app/src/Features/User/UserCreator.js'
|
||||
import { ParallelLoginError } from '../../../../../app/src/Features/Authentication/AuthenticationErrors.js'
|
||||
import { User } from '../../../../../app/src/models/User.js'
|
||||
import { splitFullName } from '../../../utils.mjs'
|
||||
|
||||
/**
 * Maps an authenticated LDAP profile onto an Overleaf user account,
 * creating the account on first login (JIT provisioning) and keeping
 * selected details in sync on subsequent logins.
 */
const LDAPAuthenticationManager = {
  async findOrCreateUser(profile, auditLog) {
    //user is already authenticated in LDAP
    const {
      attEmail,
      attFirstName,
      attLastName,
      attName,
      attAdmin,
      valAdmin,
      updateUserDetailsOnLogin,
    } = Settings.ldap

    // LDAP attributes may be multi-valued; use the first email value.
    const email = Array.isArray(profile[attEmail])
      ? profile[attEmail][0].toLowerCase()
      : profile[attEmail].toLowerCase()
    let nameParts = ["",""]
    // If dedicated first/last-name attributes are not configured, derive
    // them from the single full-name attribute instead.
    if ((!attFirstName || !attLastName) && attName) {
      nameParts = splitFullName(profile[attName] || "")
    }
    const firstName = attFirstName ? (profile[attFirstName] || "") : nameParts[0]
    let lastName = attLastName ? (profile[attLastName] || "") : nameParts[1]
    // Never leave both names empty — fall back to the email address.
    if (!firstName && !lastName) lastName = email
    let isAdmin = false
    // Admin status is granted when the configured attribute (single- or
    // multi-valued) carries the configured value.
    if( attAdmin && valAdmin ) {
      isAdmin = Array.isArray(profile[attAdmin]) ? profile[attAdmin].includes(valAdmin) :
                                                   profile[attAdmin] === valAdmin
    }
    let user = await User.findOne({ 'email': email }).exec()

    if( !user ) {
      user = await UserCreator.promises.createNewUser(
        {
          email: email,
          first_name: firstName,
          last_name: lastName,
          isAdmin: isAdmin,
          holdingAccount: false,
        }
      )
      await User.updateOne(
        { _id: user._id },
        { $set : { 'emails.0.confirmedAt' : Date.now() } }
      ).exec() //email of ldap user is confirmed
    }
    let userDetails = updateUserDetailsOnLogin ? { first_name : firstName, last_name: lastName } : {}
    if( attAdmin && valAdmin ) {
      user.isAdmin = isAdmin
      userDetails.isAdmin = isAdmin
    }
    // Optimistic-concurrency guard: bump loginEpoch only if it is unchanged
    // since the read above, so a parallel login is detected and rejected.
    // hashedPassword is unset because authentication is delegated to LDAP.
    const result = await User.updateOne(
      { _id: user._id, loginEpoch: user.loginEpoch },
      {
        $inc: { loginEpoch: 1 },
        $set: userDetails,
        $unset: { hashedPassword: "" },
      }
    ).exec()
    if (result.modifiedCount !== 1) {
      throw new ParallelLoginError()
    }
    return user
  },
}

export default {
  promises: LDAPAuthenticationManager,
}
|
|
@ -0,0 +1,120 @@
|
|||
import Settings from '@overleaf/settings'
|
||||
import logger from '@overleaf/logger'
|
||||
import { promisify } from 'util'
|
||||
import passport from 'passport'
|
||||
import ldapjs from 'ldapauth-fork/node_modules/ldapjs/lib/index.js'
|
||||
import UserGetter from '../../../../../app/src/Features/User/UserGetter.js'
|
||||
import { splitFullName } from '../../../utils.mjs'
|
||||
|
||||
/**
 * Run a single LDAP search and collect every returned entry.
 *
 * Wraps ldapjs's event-based `client.search` API in a Promise that resolves
 * with an array of entry objects (the `.object` of each `searchEntry`), or
 * rejects on either a synchronous search error or a stream `error` event.
 */
function _searchLDAP(client, baseDN, options) {
  return new Promise((resolve, reject) => {
    client.search(baseDN, options, (error, res) => {
      if (error) {
        reject(error)
        return
      }
      const entries = []
      res.on('searchEntry', entry => {
        entries.push(entry.object)
      })
      res.on('error', reject)
      res.on('end', () => {
        resolve(entries)
      })
    })
  })
}
|
||||
|
||||
/**
 * Fetch contact suggestions for `userId` directly from the LDAP directory.
 *
 * Only active when LDAP auth is enabled and OVERLEAF_LDAP_CONTACTS_FILTER is
 * set. Connects with the same server options as the passport 'ldapauth'
 * strategy, searches for directory users, and returns those not already in
 * `contacts`, sorted by last name / first name / email. Any failure is
 * logged and degrades to an empty list (contacts are best-effort).
 */
async function fetchLDAPContacts(userId, contacts) {
  if (!Settings.ldap?.enable || !process.env.OVERLEAF_LDAP_CONTACTS_FILTER) {
    return []
  }

  // Reuse the connection settings of the registered passport LDAP strategy.
  const ldapOptions = passport._strategy('ldapauth').options.server
  const { attEmail, attFirstName = "", attLastName = "", attName = "" } = Settings.ldap
  const {
    url,
    timeout,
    connectTimeout,
    tlsOptions,
    starttls,
    bindDN,
    bindCredentials
  } = ldapOptions
  const searchBase = process.env.OVERLEAF_LDAP_CONTACTS_SEARCH_BASE || ldapOptions.searchBase
  const searchScope = process.env.OVERLEAF_LDAP_CONTACTS_SEARCH_SCOPE || 'sub'
  const ldapConfig = { url, timeout, connectTimeout, tlsOptions }

  let ldapUsers
  let client

  try {
    // createClient connects asynchronously: wait for 'connect', or fail on
    // 'error' / 'connectTimeout', before using the client.
    await new Promise((resolve, reject) => {
      client = ldapjs.createClient(ldapConfig)
      client.on('error', (error) => { reject(error) })
      client.on('connectTimeout', (error) => { reject(error) })
      client.on('connect', () => { resolve() })
    })

    if (starttls) {
      const starttlsAsync = promisify(client.starttls).bind(client)
      await starttlsAsync(tlsOptions, null)
    }
    const bindAsync = promisify(client.bind).bind(client)
    await bindAsync(bindDN, bindCredentials)

    // Build the final contacts filter. When OVERLEAF_LDAP_CONTACTS_PROPERTY
    // is set, look up the current user's value of that property first and
    // substitute it for every '{{userProperty}}' placeholder in the filter.
    async function createContactsSearchFilter(client, ldapOptions, userId, contactsFilter) {
      const searchProperty = process.env.OVERLEAF_LDAP_CONTACTS_PROPERTY
      if (!searchProperty) {
        return contactsFilter
      }
      const email = await UserGetter.promises.getUserEmail(userId)
      const searchOptions = {
        scope: ldapOptions.searchScope,
        attributes: [searchProperty],
        filter: `(${Settings.ldap.attEmail}=${email})`
      }
      const searchBase = ldapOptions.searchBase
      const ldapUser = (await _searchLDAP(client, searchBase, searchOptions))[0]
      // For users not found in LDAP, fall back to a configured value or a
      // sentinel that matches nothing, so the filter stays well-formed.
      const searchPropertyValue = ldapUser ? ldapUser[searchProperty]
                                          : process.env.OVERLEAF_LDAP_CONTACTS_NON_LDAP_VALUE || 'IMATCHNOTHING'
      return contactsFilter.replace(/{{userProperty}}/g, searchPropertyValue)
    }

    const filter = await createContactsSearchFilter(client, ldapOptions, userId, process.env.OVERLEAF_LDAP_CONTACTS_FILTER)
    const searchOptions = { scope: searchScope, attributes: [attEmail, attFirstName, attLastName, attName], filter }

    ldapUsers = await _searchLDAP(client, searchBase, searchOptions)
  } catch (error) {
    logger.warn({ error }, 'Error in fetchLDAPContacts')
    return []
  } finally {
    // Always release the connection, even on the early error return.
    client?.unbind()
  }

  // Convert LDAP entries into contact records, skipping entries without an
  // email and entries already present in the user's contact list.
  const newLDAPContacts = ldapUsers.reduce((acc, ldapUser) => {
    const email = Array.isArray(ldapUser[attEmail])
      ? ldapUser[attEmail][0]?.toLowerCase()
      : ldapUser[attEmail]?.toLowerCase()
    if (!email) return acc
    if (!contacts.some(contact => contact.email === email)) {
      let nameParts = ["", ""]
      if ((!attFirstName || !attLastName) && attName) {
        nameParts = splitFullName(ldapUser[attName] || "")
      }
      const firstName = attFirstName ? (ldapUser[attFirstName] || "") : nameParts[0]
      const lastName = attLastName ? (ldapUser[attLastName] || "") : nameParts[1]
      acc.push({
        first_name: firstName,
        last_name: lastName,
        email: email,
        type: 'user'
      })
    }
    return acc
  }, [])

  // Stable, human-friendly ordering: last name, then first name, then email.
  return newLDAPContacts.sort((a, b) =>
    a.last_name.localeCompare(b.last_name) ||
    a.first_name.localeCompare(b.first_name) ||
    a.email.localeCompare(b.email)
  )
}

export default fetchLDAPContacts
|
|
@ -0,0 +1,108 @@
|
|||
import logger from '@overleaf/logger'
|
||||
import passport from 'passport'
|
||||
import { Strategy as LDAPStrategy } from 'passport-ldapauth'
|
||||
import Settings from '@overleaf/settings'
|
||||
import PermissionsManager from '../../../../../app/src/Features/Authorization/PermissionsManager.js'
|
||||
import { readFilesContentFromEnv, numFromEnv, boolFromEnv } from '../../../utils.mjs'
|
||||
import LDAPAuthenticationController from './LDAPAuthenticationController.mjs'
|
||||
import fetchLDAPContacts from './LDAPContacts.mjs'
|
||||
|
||||
/**
 * Module wiring for LDAP authentication: settings derived from the
 * environment, the passport strategy registration, the contacts hook,
 * and the group policy restricting password changes for LDAP users.
 */
const LDAPModuleManager = {
  // Populate Settings.ldap from OVERLEAF_LDAP_* environment variables.
  initSettings() {
    Settings.ldap = {
      enable: true,
      placeholder: process.env.OVERLEAF_LDAP_PLACEHOLDER || 'Username',
      attEmail: process.env.OVERLEAF_LDAP_EMAIL_ATT || 'mail',
      attFirstName: process.env.OVERLEAF_LDAP_FIRST_NAME_ATT,
      attLastName: process.env.OVERLEAF_LDAP_LAST_NAME_ATT,
      attName: process.env.OVERLEAF_LDAP_NAME_ATT,
      attAdmin: process.env.OVERLEAF_LDAP_IS_ADMIN_ATT,
      valAdmin: process.env.OVERLEAF_LDAP_IS_ADMIN_ATT_VALUE,
      updateUserDetailsOnLogin: boolFromEnv(process.env.OVERLEAF_LDAP_UPDATE_USER_DETAILS_ON_LOGIN),
    }
  },
  // passportSetup hook: register the 'ldapauth' strategy. Errors (e.g. bad
  // JSON in OVERLEAF_LDAP_SEARCH_ATTRIBUTES) are reported via the callback.
  passportSetup(passport, callback) {
    const ldapOptions = {
      url: process.env.OVERLEAF_LDAP_URL,
      bindDN: process.env.OVERLEAF_LDAP_BIND_DN || "",
      bindCredentials: process.env.OVERLEAF_LDAP_BIND_CREDENTIALS || "",
      bindProperty: process.env.OVERLEAF_LDAP_BIND_PROPERTY,
      searchBase: process.env.OVERLEAF_LDAP_SEARCH_BASE,
      searchFilter: process.env.OVERLEAF_LDAP_SEARCH_FILTER,
      searchScope: process.env.OVERLEAF_LDAP_SEARCH_SCOPE || 'sub',
      searchAttributes: JSON.parse(process.env.OVERLEAF_LDAP_SEARCH_ATTRIBUTES || '[]'),
      cache: boolFromEnv(process.env.OVERLEAF_LDAP_CACHE),
      timeout: numFromEnv(process.env.OVERLEAF_LDAP_TIMEOUT),
      connectTimeout: numFromEnv(process.env.OVERLEAF_LDAP_CONNECT_TIMEOUT),
      starttls: boolFromEnv(process.env.OVERLEAF_LDAP_STARTTLS),
      tlsOptions: {
        ca: readFilesContentFromEnv(process.env.OVERLEAF_LDAP_TLS_OPTS_CA_PATH),
        rejectUnauthorized: boolFromEnv(process.env.OVERLEAF_LDAP_TLS_OPTS_REJECT_UNAUTH),
      }
    }
    try {
      passport.use(
        new LDAPStrategy(
          {
            server: ldapOptions,
            passReqToCallback: true,
            usernameField: 'email',
            passwordField: 'password',
            // Treat strategy errors as authentication failures so the
            // callback's failure path handles them uniformly.
            handleErrorsAsFailures: true,
          },
          LDAPAuthenticationController.doPassportLogin
        )
      )
      callback(null)
    } catch (error) {
      callback(error)
    }
  },

  // getContacts hook: callback adapter around the promise-based fetcher.
  async getContacts(userId, contacts, callback) {
    try {
      const newContacts = await fetchLDAPContacts(userId, contacts)
      callback(null, newContacts)
    } catch (error) {
      callback(error)
    }
  },

  // Register capabilities and the 'ldapPolicy' denying password changes for
  // LDAP-backed accounts. registerCapability/registerPolicy throw if another
  // auth module already registered them — that is expected and only logged.
  initPolicy() {
    try {
      PermissionsManager.registerCapability('change-password', { default : true })
      PermissionsManager.registerCapability('use-ai', { default : false })
    } catch (error) {
      logger.info({}, error.message)
    }
    const ldapPolicyValidator = async ({ user, subscription }) => {
      // If user is not logged in, user.externalAuth is undefined,
      // in this case allow to change password if the user has a hashedPassword
      return user.externalAuth === 'ldap' || (user.externalAuth === undefined && !user.hashedPassword)
    }
    try {
      PermissionsManager.registerPolicy(
        'ldapPolicy',
        { 'change-password' : false },
        { validator: ldapPolicyValidator }
      )
    } catch (error) {
      logger.info({}, error.message)
    }
  },

  // getGroupPolicyForUser hook: evaluate 'ldapPolicy' for the user and hand
  // the resulting capability map back through the callback.
  getGroupPolicyForUser(user, callback) {
    PermissionsManager.promises.getUserValidationStatus({
      user,
      groupPolicy : { 'ldapPolicy' : true },
      subscription : null
    }).then(userValidationMap => {
      let groupPolicy = Object.fromEntries(userValidationMap)
      callback(null, { groupPolicy })
    }).catch(error => {
      callback(error)
    })
  },
}

export default LDAPModuleManager
|
|
@ -0,0 +1,19 @@
|
|||
import logger from '@overleaf/logger'
|
||||
import RateLimiterMiddleware from '../../../../../app/src/Features/Security/RateLimiterMiddleware.js'
|
||||
import CaptchaMiddleware from '../../../../../app/src/Features/Captcha/CaptchaMiddleware.js'
|
||||
import AuthenticationController from '../../../../../app/src/Features/Authentication/AuthenticationController.js'
|
||||
import { overleafLoginRateLimiter } from '../../../../../app/src/infrastructure/RateLimiter.js'
|
||||
import LDAPAuthenticationController from './LDAPAuthenticationController.mjs'
|
||||
|
||||
export default {
  /**
   * Mount the LDAP-aware login chain on POST /login.
   *
   * The chain tries LDAP authentication first; when the submitted identifier
   * parses as an email address the LDAP controller falls through via next()
   * so the stock local-password login can run.
   */
  apply(webRouter) {
    logger.debug({}, 'Init LDAP router')
    const loginPipeline = [
      RateLimiterMiddleware.rateLimit(overleafLoginRateLimiter), // rate limit IP (20 / 60s)
      RateLimiterMiddleware.loginRateLimitEmail(), // rate limit email (10 / 120s)
      CaptchaMiddleware.validateCaptcha('login'),
      LDAPAuthenticationController.passportLogin, // LDAP attempt first…
      AuthenticationController.passportLogin, // …local login as fallback
    ]
    webRouter.post('/login', ...loginPipeline)
  },
}
|
17
services/web/modules/authentication/ldap/index.mjs
Normal file
17
services/web/modules/authentication/ldap/index.mjs
Normal file
|
@ -0,0 +1,17 @@
|
|||
// LDAP module entry point. The module is only assembled (and its settings /
// policies initialised) when EXTERNAL_AUTH lists 'ldap'; otherwise an empty
// object is exported so the module loader treats it as disabled.
let ldapModule = {}

if (process.env.EXTERNAL_AUTH?.includes('ldap')) {
  const { default: LDAPModuleManager } = await import('./app/src/LDAPModuleManager.mjs')
  const { default: router } = await import('./app/src/LDAPRouter.mjs')

  LDAPModuleManager.initSettings()
  LDAPModuleManager.initPolicy()

  ldapModule = {
    name: 'ldap-authentication',
    router,
    hooks: {
      passportSetup: LDAPModuleManager.passportSetup,
      getContacts: LDAPModuleManager.getContacts,
      getGroupPolicyForUser: LDAPModuleManager.getGroupPolicyForUser,
    },
  }
}

export default ldapModule
|
18
services/web/modules/authentication/logout.mjs
Normal file
18
services/web/modules/authentication/logout.mjs
Normal file
|
@ -0,0 +1,18 @@
|
|||
// Logout dispatcher shared by the external-auth modules: delegates to the
// SAML/OIDC controller for users who logged in through an IdP (so the IdP
// session is terminated too) and falls through to the regular local logout
// otherwise.
//
// Bug fix: EXTERNAL_AUTH may be unset when this module is loaded, and
// req.user may be absent for unauthenticated requests — both accesses now
// use optional chaining (matching the `?.` guard used in the module
// index.mjs files) instead of throwing a TypeError.
let SAMLAuthenticationController
if (process.env.EXTERNAL_AUTH?.includes('saml')) {
  SAMLAuthenticationController = await import('./saml/app/src/SAMLAuthenticationController.mjs')
}
let OIDCAuthenticationController
if (process.env.EXTERNAL_AUTH?.includes('oidc')) {
  OIDCAuthenticationController = await import('./oidc/app/src/OIDCAuthenticationController.mjs')
}

/**
 * Express middleware: route the logout to the controller matching the
 * user's external auth provider, or call next() for local accounts.
 */
export default async function logout(req, res, next) {
  switch (req.user?.externalAuth) {
    case 'saml':
      return SAMLAuthenticationController.default.passportLogout(req, res, next)
    case 'oidc':
      return OIDCAuthenticationController.default.passportLogout(req, res, next)
    default:
      next()
  }
}
|
|
@ -0,0 +1,171 @@
|
|||
import logger from '@overleaf/logger'
|
||||
import passport from 'passport'
|
||||
import Settings from '@overleaf/settings'
|
||||
import AuthenticationController from '../../../../../app/src/Features/Authentication/AuthenticationController.js'
|
||||
import UserController from '../../../../../app/src/Features/User/UserController.js'
|
||||
import ThirdPartyIdentityManager from '../../../../../app/src/Features/User/ThirdPartyIdentityManager.js'
|
||||
import OIDCAuthenticationManager from './OIDCAuthenticationManager.mjs'
|
||||
import { acceptsJson } from '../../../../../app/src/infrastructure/RequestContentTypeDetection.js'
|
||||
|
||||
/**
 * Express/passport controller for the OpenID Connect login, account
 * linking, unlinking and logout flows.
 *
 * Bug fix in `unlinkAccount`: the error path used to `return` a plain
 * object from the route handler, which Express ignores, so the HTTP
 * request was left hanging. It now sends an actual JSON error response.
 */
const OIDCAuthenticationController = {
  /**
   * GET /oidc/login — start the OIDC flow. `?intent=link` is stashed in the
   * session so the callback knows to link rather than log in.
   */
  passportLogin(req, res, next) {
    req.session.intent = req.query.intent
    passport.authenticate('openidconnect')(req, res, next)
  },
  /**
   * GET /oidc/login/callback — complete the OIDC flow. Wraps
   * passport.authenticate so failures produce our custom
   * `{message: {text, type}}` / `{redir}` JSON responses.
   */
  passportLoginCallback(req, res, next) {
    passport.authenticate(
      'openidconnect',
      { keepSessionInfo: true },
      async function (err, user, info) {
        if (err) {
          return next(err)
        }
        if (req.session.intent === 'link') {
          delete req.session.intent
          // After linking, log out from the OIDC provider and redirect back to '/user/settings'.
          // Keycloak supports this; Authentik does not (yet).
          const logoutUrl = process.env.OVERLEAF_OIDC_LOGOUT_URL
          const redirectUri = `${Settings.siteUrl.replace(/\/+$/, '')}/user/settings`
          return res.redirect(`${logoutUrl}?id_token_hint=${info.idToken}&post_logout_redirect_uri=${encodeURIComponent(redirectUri)}`)
        }
        if (user) {
          // Keep the id_token for the eventual RP-initiated logout.
          req.session.idToken = info.idToken
          user.externalAuth = 'oidc'
          // `user` is either a user object or false
          AuthenticationController.setAuditInfo(req, {
            method: 'OIDC login',
          })
          try {
            await AuthenticationController.promises.finishLogin(user, req, res)
          } catch (err) {
            return next(err)
          }
        } else {
          if (info.redir != null) {
            // Failed JIT provisioning: drop the half-open session before
            // redirecting (e.g. to /register).
            await UserController.doLogout(req)
            return res.redirect(info.redir)
          } else {
            res.status(info.status || 401)
            delete info.status
            const body = { message: info }
            return res.json(body)
          }
        }
      }
    )(req, res, next)
  },
  /**
   * Passport verify callback for passport-openidconnect. Dispatches to the
   * link or login flow depending on the stashed session intent, and attaches
   * the id_token to `info` so the HTTP callback can store it.
   */
  async doPassportLogin(req, issuer, profile, context, idToken, accessToken, refreshToken, done) {
    let user, info
    try {
      if (req.session.intent === 'link') {
        ;({ user, info } = await OIDCAuthenticationController._doLink(
          req,
          profile
        ))
      } else {
        ;({ user, info } = await OIDCAuthenticationController._doLogin(
          req,
          profile
        ))
      }
    } catch (error) {
      return done(error)
    }
    if (user) {
      info = {
        ...(info || {}),
        idToken: idToken
      }
    }
    return done(null, user, info)
  },
  /**
   * Login flow: find or JIT-create the Overleaf user for `profile`.
   * Returns `{ user, info }` in passport verify-callback shape.
   */
  async _doLogin(req, profile) {
    const { fromKnownDevice } = AuthenticationController.getAuditInfo(req)
    const auditLog = {
      ipAddress: req.ip,
      info: { method: 'OIDC login', fromKnownDevice },
    }

    let user
    try {
      user = await OIDCAuthenticationManager.promises.findOrCreateUser(profile, auditLog)
    } catch (error) {
      logger.debug({ email : profile.emails[0].value }, `OIDC login failed: ${error}`)
      return {
        user: false,
        info: {
          type: 'error',
          text: error.message,
          status: 500,
        },
      }
    }
    if (user) {
      return { user, info: undefined }
    } else { // user account is not created
      logger.debug({ email : profile.emails[0].value }, 'OIDC JIT account creation is not allowed for this email')
      return {
        user: false,
        info: {
          redir: '/register',
          status: 401,
        },
      }
    }
  },
  /**
   * Link flow: attach the OIDC identity to the already-logged-in user.
   * Always reports `user: true` so the session user stays logged in; a
   * linking failure is surfaced through `info`.
   */
  async _doLink(req, profile) {
    const { user: { _id: userId }, ip } = req
    try {
      const auditLog = {
        ipAddress: ip,
        initiatorId: userId,
      }
      await OIDCAuthenticationManager.promises.linkAccount(userId, profile, auditLog)
    } catch (error) {
      logger.error(error.info, error.message)
      return {
        user: true,
        info: {
          type: 'error',
          text: error.message,
          status: 200,
        },
      }
    }
    return { user: true, info: undefined }
  },
  /**
   * POST /user/oauth-unlink — detach the configured OIDC identity from the
   * logged-in user. Responds 200 with an empty body on success, or a JSON
   * error message on failure.
   */
  async unlinkAccount(req, res, next) {
    try {
      const { user: { _id: userId }, body: { providerId }, ip } = req
      const auditLog = {
        ipAddress: ip,
        initiatorId: userId,
      }
      await ThirdPartyIdentityManager.promises.unlink(userId, providerId, auditLog)
      return res.status(200).end()
    } catch (error) {
      logger.error(error.info, error.message)
      // Fixed: previously this returned a plain object, leaving the request
      // hanging. Send the standard `{message: {text, type}}` error body
      // (status 200, matching the other in-page auth error responses).
      return res.status(200).json({
        message: {
          type: 'error',
          text: 'Can not unlink account',
        },
      })
    }
  },
  /**
   * RP-initiated logout: end the local session, then redirect to the IdP's
   * logout endpoint with the stored id_token hint.
   */
  async passportLogout(req, res, next) {
    // TODO: instead of storing idToken in session, use refreshToken to obtain a new idToken?
    const idTokenHint = req.session.idToken
    await UserController.doLogout(req)
    const logoutUrl = process.env.OVERLEAF_OIDC_LOGOUT_URL
    const redirectUri = Settings.siteUrl
    res.redirect(`${logoutUrl}?id_token_hint=${idTokenHint}&post_logout_redirect_uri=${encodeURIComponent(redirectUri)}`)
  },
  /** GET /oidc/logout/callback — return the browser to the site root. */
  passportLogoutCallback(req, res, next) {
    const redirectUri = Settings.siteUrl
    res.redirect(redirectUri)
  },
}
export default OIDCAuthenticationController
|
|
@ -0,0 +1,108 @@
|
|||
import Settings from '@overleaf/settings'
|
||||
import UserCreator from '../../../../../app/src/Features/User/UserCreator.js'
|
||||
import ThirdPartyIdentityManager from '../../../../../app/src/Features/User/ThirdPartyIdentityManager.js'
|
||||
import { ParallelLoginError } from '../../../../../app/src/Features/Authentication/AuthenticationErrors.js'
|
||||
import { User } from '../../../../../app/src/models/User.js'
|
||||
|
||||
/**
 * Maps an OIDC profile onto an Overleaf user account: logs in via an
 * existing third-party link, JIT-creates and links accounts (subject to
 * allowedOIDCEmailDomains), and links identities to logged-in users.
 */
const OIDCAuthenticationManager = {
  /**
   * Resolve the Overleaf user for `profile`, creating and/or linking the
   * account as needed. Returns the user, or null when JIT creation is
   * blocked by the allowed-email-domains list. Throws ParallelLoginError
   * when a concurrent login wins the loginEpoch race.
   */
  async findOrCreateUser(profile, auditLog) {
    const {
      attUserId,
      attAdmin,
      valAdmin,
      updateUserDetailsOnLogin,
      providerId,
    } = Settings.oidc
    const email = profile.emails[0].value
    // The stable provider-side identifier; 'email' means "use the email".
    const oidcUserId = (attUserId === 'email') ? email : profile[attUserId]
    const firstName = profile.name?.givenName || ""
    const lastName = profile.name?.familyName || ""
    let isAdmin = false
    if (attAdmin && valAdmin) {
      if (attAdmin === 'email') {
        isAdmin = (email === valAdmin)
      } else {
        isAdmin = (profile[attAdmin] === valAdmin)
      }
    }
    const oidcUserData = null // Possibly it can be used later
    let user
    try {
      user = await ThirdPartyIdentityManager.promises.login(providerId, oidcUserId, oidcUserData)
    } catch {
      // A user with the specified OIDC ID and provider ID is not found. Search for a user with the given email.
      // If no user exists with this email, create a new user and link the OIDC account to it (provided this is allowed by allowedOIDCEmailDomains).
      // If a user exists but no account from the specified OIDC provider is linked to this user, link the OIDC account to this user.
      // If an account from the specified provider is already linked to this user, unlink it, and link the OIDC account to this user.
      // (Is it safe? Concider: If an account from the specified provider is already linked to this user, throw an error)
      user = await User.findOne({ 'email': email }).exec()
      if (!user) {
        // JIT creation gate: honour the allow-list, including '*.domain'
        // wildcard patterns that match any subdomain of 'domain'.
        const allowedDomains = Settings.oidc.allowedOIDCEmailDomains
        if (
          allowedDomains &&
          !allowedDomains.some(pattern => {
            const domain = email.split('@')[1]
            if (pattern.startsWith('*.')) {
              const base = pattern.slice(2)
              return domain.endsWith(`.${base}`)
            }
            return domain === pattern
          })
        ) {
          return null
        }
        user = await UserCreator.promises.createNewUser(
          {
            email: email,
            first_name: firstName,
            last_name: lastName,
            isAdmin: isAdmin,
            holdingAccount: false,
          }
        )
      }
      // const alreadyLinked = user.thirdPartyIdentifiers.some(item => item.providerId === providerId)
      // if (!alreadyLinked) {
      auditLog.initiatorId = user._id
      await ThirdPartyIdentityManager.promises.link(user._id, providerId, oidcUserId, oidcUserData, auditLog)
      await User.updateOne(
        { _id: user._id },
        { $set : {
            'emails.0.confirmedAt': Date.now(), //email of external user is confirmed
          },
        }
      ).exec()
      // } else {
      //   throw new Error(`Overleaf user ${user.email} is already linked to another ${providerId} user`)
      // }
    }

    let userDetails = updateUserDetailsOnLogin ? { first_name : firstName, last_name: lastName } : {}
    if (attAdmin && valAdmin) {
      user.isAdmin = isAdmin
      userDetails.isAdmin = isAdmin
    }
    // Optimistic-concurrency guard: bump loginEpoch only if unchanged since
    // the read, so parallel logins for the same user are detected.
    const result = await User.updateOne(
      { _id: user._id, loginEpoch: user.loginEpoch }, { $inc: { loginEpoch: 1 }, $set: userDetails },
      {}
    ).exec()

    if (result.modifiedCount !== 1) {
      throw new ParallelLoginError()
    }
    return user
  },
  /**
   * Attach the OIDC identity from `profile` to the existing user `userId`.
   */
  async linkAccount(userId, profile, auditLog) {
    const {
      attUserId,
      providerId,
    } = Settings.oidc
    const oidcUserId = (attUserId === 'email') ? profile.emails[0].value : profile[attUserId]
    const oidcUserData = null // Possibly it can be used later
    await ThirdPartyIdentityManager.promises.link(userId, providerId, oidcUserId, oidcUserData, auditLog)
  },
}

export default {
  promises: OIDCAuthenticationManager,
}
|
|
@ -0,0 +1,86 @@
|
|||
import logger from '@overleaf/logger'
|
||||
import passport from 'passport'
|
||||
import Settings from '@overleaf/settings'
|
||||
import { readFilesContentFromEnv, numFromEnv, boolFromEnv } from '../../../utils.mjs'
|
||||
import PermissionsManager from '../../../../../app/src/Features/Authorization/PermissionsManager.js'
|
||||
import OIDCAuthenticationController from './OIDCAuthenticationController.mjs'
|
||||
import { Strategy as OIDCStrategy } from 'passport-openidconnect'
|
||||
|
||||
/**
 * Module wiring for OpenID Connect authentication: settings derived from
 * the environment, the passport strategy registration, and the group
 * policy restricting password changes for OIDC users.
 */
const OIDCModuleManager = {
  // Populate Settings.oidc from OVERLEAF_OIDC_* environment variables.
  initSettings() {
    let providerId = process.env.OVERLEAF_OIDC_PROVIDER_ID || 'oidc'
    Settings.oidc = {
      enable: true,
      providerId: providerId,
      // NOTE(review): falls back to Settings.oauthProviders[providerId].name —
      // assumes that provider entry exists; verify against the settings file.
      identityServiceName: process.env.OVERLEAF_OIDC_IDENTITY_SERVICE_NAME || `Log in with ${Settings.oauthProviders[providerId].name}`,
      attUserId: process.env.OVERLEAF_OIDC_USER_ID_FIELD || 'id',
      attAdmin: process.env.OVERLEAF_OIDC_IS_ADMIN_FIELD,
      valAdmin: process.env.OVERLEAF_OIDC_IS_ADMIN_FIELD_VALUE,
      updateUserDetailsOnLogin: boolFromEnv(process.env.OVERLEAF_OIDC_UPDATE_USER_DETAILS_ON_LOGIN),
      // null means "no restriction"; otherwise a trimmed, non-empty list of
      // domains (with optional '*.' wildcard) allowed for JIT creation.
      allowedOIDCEmailDomains: process.env.OVERLEAF_OIDC_ALLOWED_EMAIL_DOMAINS === undefined
        ? null
        : process.env.OVERLEAF_OIDC_ALLOWED_EMAIL_DOMAINS.split(',').map(s => s.trim()).filter(Boolean),
    }
  },
  // passportSetup hook: register the 'openidconnect' strategy.
  passportSetup(passport, callback) {
    const oidcOptions = {
      issuer: process.env.OVERLEAF_OIDC_ISSUER,
      authorizationURL: process.env.OVERLEAF_OIDC_AUTHORIZATION_URL,
      tokenURL: process.env.OVERLEAF_OIDC_TOKEN_URL,
      userInfoURL: process.env.OVERLEAF_OIDC_USER_INFO_URL,
      clientID: process.env.OVERLEAF_OIDC_CLIENT_ID,
      clientSecret: process.env.OVERLEAF_OIDC_CLIENT_SECRET,
      callbackURL: `${Settings.siteUrl.replace(/\/+$/, '')}/oidc/login/callback`,
      scope: process.env.OVERLEAF_OIDC_SCOPE || 'openid profile email',
      passReqToCallback: true,
    }
    try {
      passport.use(
        new OIDCStrategy(
          oidcOptions,
          OIDCAuthenticationController.doPassportLogin
        )
      )
      callback(null)
    } catch (error) {
      callback(error)
    }
  },
  // Register capabilities and the 'oidcPolicy' denying password changes for
  // OIDC-backed accounts. Registration throws if another auth module already
  // registered the same names — expected, and only logged.
  initPolicy() {
    try {
      PermissionsManager.registerCapability('change-password', { default : true })
      PermissionsManager.registerCapability('use-ai', { default : false })
    } catch (error) {
      logger.info({}, error.message)
    }
    const oidcPolicyValidator = async ({ user, subscription }) => {
      // If user is not logged in, user.externalAuth is undefined,
      // in this case allow to change password if the user has a hashedPassword
      return user.externalAuth === 'oidc' || (user.externalAuth === undefined && !user.hashedPassword)
    }
    try {
      PermissionsManager.registerPolicy(
        'oidcPolicy',
        { 'change-password' : false },
        { validator: oidcPolicyValidator }
      )
    } catch (error) {
      logger.info({}, error.message)
    }
  },

  // getGroupPolicyForUser hook: evaluate 'oidcPolicy' for the user and hand
  // the resulting capability map back through the callback.
  getGroupPolicyForUser(user, callback) {
    PermissionsManager.promises.getUserValidationStatus({
      user,
      groupPolicy : { 'oidcPolicy' : true },
      subscription : null
    }).then(userValidationMap => {
      let groupPolicy = Object.fromEntries(userValidationMap)
      callback(null, { groupPolicy })
    }).catch(error => {
      callback(error)
    })
  },
}

export default OIDCModuleManager
|
|
@ -0,0 +1,18 @@
|
|||
import logger from '@overleaf/logger'
|
||||
import UserController from '../../../../../app/src/Features/User/UserController.js'
|
||||
import AuthenticationController from '../../../../../app/src/Features/Authentication/AuthenticationController.js'
|
||||
import OIDCAuthenticationController from './OIDCAuthenticationController.mjs'
|
||||
import logout from '../../../logout.mjs'
|
||||
|
||||
export default {
  /**
   * Mount the OIDC routes: login, login/logout callbacks, account unlink,
   * and a logout override that ends the IdP session for external users.
   * Login endpoints are whitelisted so unauthenticated users can reach them.
   */
  apply(webRouter) {
    logger.debug({}, 'Init OIDC router')
    const loginPath = '/oidc/login'
    const loginCallbackPath = '/oidc/login/callback'
    webRouter.get(loginPath, OIDCAuthenticationController.passportLogin)
    AuthenticationController.addEndpointToLoginWhitelist(loginPath)
    webRouter.get(loginCallbackPath, OIDCAuthenticationController.passportLoginCallback)
    AuthenticationController.addEndpointToLoginWhitelist(loginCallbackPath)
    webRouter.get('/oidc/logout/callback', OIDCAuthenticationController.passportLogoutCallback)
    webRouter.post('/user/oauth-unlink', OIDCAuthenticationController.unlinkAccount)
    webRouter.post('/logout', logout, UserController.logout)
  },
}
|
16
services/web/modules/authentication/oidc/index.mjs
Normal file
16
services/web/modules/authentication/oidc/index.mjs
Normal file
|
@ -0,0 +1,16 @@
|
|||
// OIDC module entry point. The module is only assembled (and its settings /
// policies initialised) when EXTERNAL_AUTH lists 'oidc'; otherwise an empty
// object is exported so the module loader treats it as disabled.
let oidcModule = {}

if (process.env.EXTERNAL_AUTH?.includes('oidc')) {
  const { default: OIDCModuleManager } = await import('./app/src/OIDCModuleManager.mjs')
  const { default: router } = await import('./app/src/OIDCRouter.mjs')

  OIDCModuleManager.initSettings()
  OIDCModuleManager.initPolicy()

  oidcModule = {
    name: 'oidc-authentication',
    router,
    hooks: {
      passportSetup: OIDCModuleManager.passportSetup,
      getGroupPolicyForUser: OIDCModuleManager.getGroupPolicyForUser,
    },
  }
}

export default oidcModule
|
|
@ -0,0 +1,150 @@
|
|||
import Settings from '@overleaf/settings'
|
||||
import logger from '@overleaf/logger'
|
||||
import passport from 'passport'
|
||||
import AuthenticationController from '../../../../../app/src/Features/Authentication/AuthenticationController.js'
|
||||
import SAMLAuthenticationManager from './SAMLAuthenticationManager.mjs'
|
||||
import UserController from '../../../../../app/src/Features/User/UserController.js'
|
||||
import UserSessionsManager from '../../../../../app/src/Features/User/UserSessionsManager.js'
|
||||
import { handleAuthenticateErrors } from '../../../../../app/src/Features/Authentication/AuthenticationErrors.js'
|
||||
import { xmlResponse } from '../../../../../app/src/infrastructure/Response.js'
|
||||
import { readFilesContentFromEnv } from '../../../utils.mjs'
|
||||
|
||||
/**
 * Express/passport glue for SAML single sign-on and single logout.
 *
 * NOTE(review): several methods reach into passport-saml private internals
 * (`passport._strategy('saml')._saml.options`) — these may break on a
 * passport-saml upgrade; confirm against the installed version.
 */
const SAMLAuthenticationController = {
  /**
   * Kick off SP-initiated login by delegating to the 'saml' strategy.
   * With the HTTP-POST binding the strategy responds with a self-submitting
   * HTML form, so the `default-src`/`form-action` CSP directives are stripped
   * first — otherwise the browser would block the auto-POST to the IdP.
   */
  passportLogin(req, res, next) {
    if ( passport._strategy('saml')._saml.options.authnRequestBinding === 'HTTP-POST') {
      const csp = res.getHeader('Content-Security-Policy')
      if (csp) {
        res.setHeader(
          'Content-Security-Policy',
          // remove only the default-src and form-action directives; keep the rest
          csp.replace(/(?:^|\s)(default-src|form-action)[^;]*;?/g, '')
        )
      }
    }
    passport.authenticate('saml')(req, res, next)
  },
  /**
   * Handle the IdP's login response (Assertion Consumer Service endpoint).
   */
  passportLoginCallback(req, res, next) {
    // This function is middleware which wraps the passport.authenticate middleware,
    // so we can send back our custom `{message: {text: "", type: ""}}` responses on failure,
    // and send a `{redir: ""}` response on success
    passport.authenticate(
      'saml',
      { keepSessionInfo: true },
      async function (err, user, info) {
        if (err) {
          return next(err)
        }
        if (user) {
          // `user` is either a user object or false
          AuthenticationController.setAuditInfo(req, {
            method: 'SAML login',
          })
          try {
            // establishes the session and computes the post-login redirect
            await AuthenticationController.promises.finishLogin(user, req, res)
          } catch (err) {
            return next(err)
          }
        } else {
          if (info.redir != null) {
            return res.json({ redir: info.redir })
          } else {
            // default to 401 when the failure info carries no explicit status
            res.status(info.status || 401)
            delete info.status
            const body = { message: info }
            return res.json(body)
          }
        }
      }
    )(req, res, next)
  },
  /**
   * passport "verify" callback for login: adapts the promise-based
   * `_doPassportLogin` to passport's `done(err, user, info)` convention.
   */
  async doPassportLogin(req, profile, done) {
    let user, info
    try {
      ;({ user, info } = await SAMLAuthenticationController._doPassportLogin(
        req,
        profile
      ))
    } catch (error) {
      return done(error)
    }
    return done(undefined, user, info)
  },
  /**
   * Resolve the SAML profile to a local user (creating one if needed).
   *
   * On success, tags the in-memory user with `externalAuth = 'saml'` (not
   * persisted here) and stores the assertion's nameID/sessionIndex in the
   * session so a later single-logout request can be matched back to it.
   * Returns `{ user, info }`; on failure `user` is false and `info` carries
   * the message shown to the client.
   */
  async _doPassportLogin(req, profile) {
    const { fromKnownDevice } = AuthenticationController.getAuditInfo(req)
    const auditLog = {
      ipAddress: req.ip,
      info: { method: 'SAML login', fromKnownDevice },
    }

    let user
    try {
      user = await SAMLAuthenticationManager.promises.findOrCreateUser(profile, auditLog)
    } catch (error) {
      // map creation/lookup errors to a client-facing failure payload
      return {
        user: false,
        info: handleAuthenticateErrors(error, req),
      }
    }
    if (user) {
      user.externalAuth = 'saml'
      // remembered for single-logout matching in _doPassportLogout
      req.session.saml_extce = {nameID : profile.nameID, sessionIndex : profile.sessionIndex}
      return { user, info: undefined }
    } else { // we cannot be here, something is terribly wrong
      logger.debug({ email : profile.mail }, 'failed SAML log in')
      return {
        user: false,
        info: {
          type: 'error',
          text: 'Unknown error',
          status: 500,
        },
      }
    }
  },
  /**
   * SP-initiated single logout: asks the strategy to build the IdP logout
   * URL, terminates the local session, then redirects the browser to the IdP.
   * NOTE(review): the local logout runs before the `err` check, so the local
   * session is ended even when building the IdP URL fails — confirm intended.
   */
  async passportLogout(req, res, next) {
    passport._strategy('saml').logout(req, async (err, url) => {
      await UserController.doLogout(req)
      if (err) return next(err)
      res.redirect(url)
    })
  },
  /**
   * Callback endpoint for IdP-initiated logout / logout responses: lets the
   * strategy process the SAML logout message, then sends the user to /login.
   */
  passportLogoutCallback(req, res, next) {
    //TODO: is it possible to close the editor?
    passport.authenticate('saml')(req, res, (err) => {
      if (err) return next(err)
      res.redirect('/login');
    })
  },
  /**
   * passport logout-verify callback: adapts `_doPassportLogout` to the
   * `done(err, user, info)` convention, mirroring doPassportLogin.
   */
  async doPassportLogout(req, profile, done) {
    let user, info
    try {
      ;({ user, info } = await SAMLAuthenticationController._doPassportLogout(
        req,
        profile
      ))
    } catch (error) {
      return done(error)
    }
    return done(undefined, user, info)
  },
  /**
   * Process a logout request's profile. If it matches the nameID/sessionIndex
   * remembered at login, substitute the full `req.user` so downstream code
   * sees the real user. Session untracking is best-effort: failures are only
   * logged, never propagated.
   */
  async _doPassportLogout(req, profile) {
    if (req?.session?.saml_extce?.nameID === profile.nameID &&
        req?.session?.saml_extce?.sessionIndex === profile.sessionIndex) {
      profile = req.user
    }
    await UserSessionsManager.promises.untrackSession(req.user, req.sessionID).catch(err => {
      logger.warn({ err, userId: req.user._id }, 'failed to untrack session')
    })
    return { user: profile, info: undefined }
  },
  /**
   * Serve the SP metadata XML (as a download named `<issuer>-meta.xml`),
   * generated from the configured decryption/public certificates.
   */
  getSPMetadata(req, res) {
    const samlStratery = passport._strategy('saml')
    res.setHeader('Content-Disposition', `attachment; filename="${samlStratery._saml.options.issuer}-meta.xml"`)
    xmlResponse(res,
      samlStratery.generateServiceProviderMetadata(
        readFilesContentFromEnv(process.env.OVERLEAF_SAML_DECRYPTION_CERT),
        readFilesContentFromEnv(process.env.OVERLEAF_SAML_PUBLIC_CERT)
      )
    )
  },
}

export default SAMLAuthenticationController
|
|
@ -0,0 +1,85 @@
|
|||
import Settings from '@overleaf/settings'
|
||||
import UserCreator from '../../../../../app/src/Features/User/UserCreator.js'
|
||||
import { ParallelLoginError } from '../../../../../app/src/Features/Authentication/AuthenticationErrors.js'
|
||||
import SAMLIdentityManager from '../../../../../app/src/Features/User/SAMLIdentityManager.js'
|
||||
import { User } from '../../../../../app/src/models/User.js'
|
||||
|
||||
const SAMLAuthenticationManager = {
  /**
   * Map a SAML profile onto a local user, creating or linking one as needed.
   *
   * Attribute names (user id, email, names, admin flag) come from
   * `Settings.saml`. Lookup order: existing SAML identity → user with the
   * same email (then linked in place) → brand-new user.
   *
   * @param {Object} profile - attributes from the SAML assertion
   * @param {Object} auditLog - audit context; NOTE(review): currently unused
   *   in this implementation — confirm whether it should be recorded.
   * @returns {Promise<Object>} the resolved user document
   * @throws {ParallelLoginError} when a concurrent login bumped loginEpoch first
   */
  async findOrCreateUser(profile, auditLog) {
    const {
      attUserId,
      attEmail,
      attFirstName,
      attLastName,
      attAdmin,
      valAdmin,
      updateUserDetailsOnLogin,
    } = Settings.saml
    const externalUserId = profile[attUserId]
    // the email attribute may be multi-valued; take the first entry
    const email = Array.isArray(profile[attEmail])
      ? profile[attEmail][0].toLowerCase()
      : profile[attEmail].toLowerCase()
    const firstName = attFirstName ? profile[attFirstName] : ""
    const lastName = attLastName ? profile[attLastName] : email
    let isAdmin = false
    // admin status is granted only when both the attribute name and the
    // expected value are configured, and the profile carries that value
    if (attAdmin && valAdmin) {
      isAdmin = (Array.isArray(profile[attAdmin]) ? profile[attAdmin].includes(valAdmin) :
                 profile[attAdmin] === valAdmin)
    }
    const providerId = '1' // for now, only one fixed IdP is supported
    // We search for a SAML user, and if none is found, we search for a user with the given email. If a user is found,
    // we update the user to be a SAML user, otherwise, we create a new SAML user with the given email. In the case of
    // multiple SAML IdPs, one would have to do something similar, or possibly report an error like
    // 'the email is associated with the wrong IdP'
    let user = await SAMLIdentityManager.getUser(providerId, externalUserId, attUserId)
    if (!user) {
      user = await User.findOne({ 'email': email }).exec()
      if (!user) {
        user = await UserCreator.promises.createNewUser(
          {
            email: email,
            first_name: firstName,
            last_name: lastName,
            isAdmin: isAdmin,
            holdingAccount: false,
            samlIdentifiers: [{ providerId: providerId }],
          }
        )
      }
      // cannot use SAMLIdentityManager.linkAccounts because affilations service is not there
      await User.updateOne(
        { _id: user._id },
        {
          $set : {
            // NOTE(review): stores a millisecond timestamp; presumably the
            // schema casts it to a Date — confirm.
            'emails.0.confirmedAt': Date.now(), //email of saml user is confirmed
            'emails.0.samlProviderId': providerId,
            'samlIdentifiers.0.providerId': providerId,
            'samlIdentifiers.0.externalUserId': externalUserId,
            'samlIdentifiers.0.userIdAttribute': attUserId,
          },
        }
      ).exec()
    }
    // optionally refresh names from the IdP on every login
    let userDetails = updateUserDetailsOnLogin ? { first_name : firstName, last_name: lastName } : {}
    if (attAdmin && valAdmin) {
      user.isAdmin = isAdmin
      userDetails.isAdmin = isAdmin
    }
    // optimistic-concurrency guard: the update only matches if loginEpoch is
    // unchanged since we read the user; hashedPassword is removed because
    // SAML users must not be able to log in with a local password
    const result = await User.updateOne(
      { _id: user._id, loginEpoch: user.loginEpoch },
      {
        $inc: { loginEpoch: 1 },
        $set: userDetails,
        $unset: { hashedPassword: "" },
      },
    ).exec()
    if (result.modifiedCount !== 1) {
      throw new ParallelLoginError()
    }
    return user
  },
}

// exposed under `promises` to match the project's async-manager convention
export default {
  promises: SAMLAuthenticationManager,
}
|
|
@ -0,0 +1,102 @@
|
|||
import logger from '@overleaf/logger'
|
||||
import passport from 'passport'
|
||||
import Settings from '@overleaf/settings'
|
||||
import { readFilesContentFromEnv, numFromEnv, boolFromEnv } from '../../../utils.mjs'
|
||||
import PermissionsManager from '../../../../../app/src/Features/Authorization/PermissionsManager.js'
|
||||
import SAMLAuthenticationController from './SAMLAuthenticationController.mjs'
|
||||
import { Strategy as SAMLStrategy } from '@node-saml/passport-saml'
|
||||
|
||||
/**
 * Wires SAML authentication into the web app: builds `Settings.saml` from
 * OVERLEAF_SAML_* environment variables, registers the passport-saml
 * strategy, and installs the permission policy applied to SAML users.
 */
const SAMLModuleManager = {
  /**
   * Populate `Settings.saml` from the environment, with defaults for the
   * attribute names used to map the SAML profile onto a local user.
   */
  initSettings() {
    Settings.saml = {
      enable: true,
      identityServiceName: process.env.OVERLEAF_SAML_IDENTITY_SERVICE_NAME || 'Log in with SAML IdP',
      attUserId: process.env.OVERLEAF_SAML_USER_ID_FIELD || 'nameID',
      attEmail: process.env.OVERLEAF_SAML_EMAIL_FIELD || 'nameID',
      attFirstName: process.env.OVERLEAF_SAML_FIRST_NAME_FIELD || 'givenName',
      attLastName: process.env.OVERLEAF_SAML_LAST_NAME_FIELD || 'lastName',
      attAdmin: process.env.OVERLEAF_SAML_IS_ADMIN_FIELD,
      valAdmin: process.env.OVERLEAF_SAML_IS_ADMIN_FIELD_VALUE,
      updateUserDetailsOnLogin: boolFromEnv(process.env.OVERLEAF_SAML_UPDATE_USER_DETAILS_ON_LOGIN),
    }
  },
  /**
   * Build the node-saml strategy options from the environment and register
   * the strategy with passport. Node-style completion: `callback(null)` on
   * success, `callback(error)` if strategy construction/registration throws.
   */
  passportSetup(passport, callback) {
    const samlOptions = {
      entryPoint: process.env.OVERLEAF_SAML_ENTRYPOINT,
      // callback URLs are derived from siteUrl with any trailing slash removed
      callbackUrl: `${Settings.siteUrl.replace(/\/+$/, '')}/saml/login/callback`,
      issuer: process.env.OVERLEAF_SAML_ISSUER,
      audience: process.env.OVERLEAF_SAML_AUDIENCE,
      cert: readFilesContentFromEnv(process.env.OVERLEAF_SAML_IDP_CERT),
      privateKey: readFilesContentFromEnv(process.env.OVERLEAF_SAML_PRIVATE_KEY),
      decryptionPvk: readFilesContentFromEnv(process.env.OVERLEAF_SAML_DECRYPTION_PVK),
      signatureAlgorithm: process.env.OVERLEAF_SAML_SIGNATURE_ALGORITHM,
      additionalParams: JSON.parse(process.env.OVERLEAF_SAML_ADDITIONAL_PARAMS || '{}'),
      additionalAuthorizeParams: JSON.parse(process.env.OVERLEAF_SAML_ADDITIONAL_AUTHORIZE_PARAMS || '{}'),
      identifierFormat: process.env.OVERLEAF_SAML_IDENTIFIER_FORMAT,
      acceptedClockSkewMs: numFromEnv(process.env.OVERLEAF_SAML_ACCEPTED_CLOCK_SKEW_MS),
      attributeConsumingServiceIndex: process.env.OVERLEAF_SAML_ATTRIBUTE_CONSUMING_SERVICE_INDEX,
      authnContext: process.env.OVERLEAF_SAML_AUTHN_CONTEXT ? JSON.parse(process.env.OVERLEAF_SAML_AUTHN_CONTEXT) : undefined,
      forceAuthn: boolFromEnv(process.env.OVERLEAF_SAML_FORCE_AUTHN),
      disableRequestedAuthnContext: boolFromEnv(process.env.OVERLEAF_SAML_DISABLE_REQUESTED_AUTHN_CONTEXT),
      skipRequestCompression: process.env.OVERLEAF_SAML_AUTHN_REQUEST_BINDING === 'HTTP-POST', // compression should be skipped iff authnRequestBinding is POST
      authnRequestBinding: process.env.OVERLEAF_SAML_AUTHN_REQUEST_BINDING,
      validateInResponseTo: process.env.OVERLEAF_SAML_VALIDATE_IN_RESPONSE_TO,
      requestIdExpirationPeriodMs: numFromEnv(process.env.OVERLEAF_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS),
      // cacheProvider: process.env.OVERLEAF_SAML_CACHE_PROVIDER,
      logoutUrl: process.env.OVERLEAF_SAML_LOGOUT_URL,
      logoutCallbackUrl: `${Settings.siteUrl.replace(/\/+$/, '')}/saml/logout/callback`,
      additionalLogoutParams: JSON.parse(process.env.OVERLEAF_SAML_ADDITIONAL_LOGOUT_PARAMS || '{}'),
      wantAssertionsSigned: boolFromEnv(process.env.OVERLEAF_SAML_WANT_ASSERTIONS_SIGNED),
      wantAuthnResponseSigned: boolFromEnv(process.env.OVERLEAF_SAML_WANT_AUTHN_RESPONSE_SIGNED),
      // the verify callbacks below receive `req` as their first argument
      passReqToCallback: true,
    }
    try {
      passport.use(
        new SAMLStrategy(
          samlOptions,
          SAMLAuthenticationController.doPassportLogin,
          SAMLAuthenticationController.doPassportLogout
        )
      )
      callback(null)
    } catch (error) {
      callback(error)
    }
  },
  /**
   * Register the capabilities and the group policy that restrict SAML users
   * (e.g. disallow local password changes). Registration errors — typically
   * "already registered" when several auth modules are enabled — are
   * downgraded to info-level logs rather than propagated.
   */
  initPolicy() {
    try {
      PermissionsManager.registerCapability('change-password', { default : true })
      PermissionsManager.registerCapability('use-ai', { default : false })
    } catch (error) {
      logger.info({}, error.message)
    }
    // returns true when the samlPolicy should apply to this user
    const samlPolicyValidator = async ({ user, subscription }) => {
      // If user is not logged in, user.externalAuth is undefined,
      // in this case allow to change password if the user has a hashedPassword
      return user.externalAuth === 'saml' || (user.externalAuth === undefined && !user.hashedPassword)
    }
    try {
      PermissionsManager.registerPolicy(
        'samlPolicy',
        { 'change-password' : false },
        { validator: samlPolicyValidator }
      )
    } catch (error) {
      logger.info({}, error.message)
    }
  },
  /**
   * Callback-style hook: evaluate the samlPolicy for `user` and return the
   * resulting group policy map via `callback(null, { groupPolicy })`.
   */
  getGroupPolicyForUser(user, callback) {
    PermissionsManager.promises.getUserValidationStatus({
      user,
      groupPolicy : { 'samlPolicy' : true },
      subscription : null
    }).then(userValidationMap => {
      let groupPolicy = Object.fromEntries(userValidationMap)
      callback(null, { groupPolicy })
    }).catch(error => {
      callback(error)
    })
  },
}

export default SAMLModuleManager
|
|
@ -0,0 +1,11 @@
|
|||
import logger from '@overleaf/logger'
|
||||
import SAMLAuthenticationController from './SAMLAuthenticationController.mjs'
|
||||
|
||||
/**
 * Registers the SAML endpoints that must bypass CSRF protection: the IdP
 * posts/redirects to these callbacks directly, so no CSRF token is present.
 */
export default {
  apply(webRouter) {
    logger.debug({}, 'Init SAML NonCsrfRouter')
    const routes = [
      ['post', '/saml/login/callback', SAMLAuthenticationController.passportLoginCallback],
      ['get', '/saml/logout/callback', SAMLAuthenticationController.passportLogoutCallback],
      ['post', '/saml/logout/callback', SAMLAuthenticationController.passportLogoutCallback],
    ]
    for (const [method, path, handler] of routes) {
      webRouter[method](path, handler)
    }
  },
}
|
|
@ -0,0 +1,16 @@
|
|||
import logger from '@overleaf/logger'
|
||||
import AuthenticationController from '../../../../../app/src/Features/Authentication/AuthenticationController.js'
|
||||
import UserController from '../../../../../app/src/Features/User/UserController.js'
|
||||
import SAMLAuthenticationController from './SAMLAuthenticationController.mjs'
|
||||
import logout from '../../../logout.mjs'
|
||||
|
||||
/**
 * Registers the CSRF-protected SAML routes: login and SP-metadata endpoints
 * (both whitelisted so unauthenticated users can reach them) plus the
 * logout override.
 */
export default {
  apply(webRouter) {
    logger.debug({}, 'Init SAML router')
    const publicRoutes = [
      ['/saml/login', SAMLAuthenticationController.passportLogin],
      ['/saml/meta', SAMLAuthenticationController.getSPMetadata],
    ]
    for (const [path, handler] of publicRoutes) {
      webRouter.get(path, handler)
      // reachable without an existing session
      AuthenticationController.addEndpointToLoginWhitelist(path)
    }
    webRouter.post('/logout', logout, UserController.logout)
  },
}
|
18
services/web/modules/authentication/saml/index.mjs
Normal file
18
services/web/modules/authentication/saml/index.mjs
Normal file
|
@ -0,0 +1,18 @@
|
|||
// SAML authentication module entry point.
// The module stays an empty object (and is effectively disabled) unless
// EXTERNAL_AUTH lists 'saml'; only then are the implementation files loaded.
let samlModule = {}

const samlEnabled = process.env.EXTERNAL_AUTH?.includes('saml')
if (samlEnabled) {
  // Lazy dynamic imports: avoid loading SAML code at all when disabled.
  const { default: SAMLModuleManager } = await import('./app/src/SAMLModuleManager.mjs')
  const { default: router } = await import('./app/src/SAMLRouter.mjs')
  const { default: nonCsrfRouter } = await import('./app/src/SAMLNonCsrfRouter.mjs')

  SAMLModuleManager.initSettings()
  SAMLModuleManager.initPolicy()

  samlModule = {
    name: 'saml-authentication',
    hooks: {
      passportSetup: SAMLModuleManager.passportSetup,
      getGroupPolicyForUser: SAMLModuleManager.getGroupPolicyForUser,
    },
    router,
    nonCsrfRouter,
  }
}

export default samlModule
|
42
services/web/modules/authentication/utils.mjs
Normal file
42
services/web/modules/authentication/utils.mjs
Normal file
|
@ -0,0 +1,42 @@
|
|||
import fs from 'fs'
|
||||
/**
 * Read certificate/key material referenced by an environment variable.
 *
 * @param {string|undefined} envVar - either a single file name ('file.pem')
 *   or a JSON array of file names ('["file.pem", "file2.pem"]')
 * @returns {string|string[]|undefined} file contents (array for JSON input),
 *   or undefined when the variable is unset/empty
 */
function readFilesContentFromEnv(envVar) {
  if (!envVar) return undefined
  let fileNames
  try {
    fileNames = JSON.parse(envVar)
  } catch (error) {
    // not valid JSON — treat the whole value as a single file name
    if (error instanceof SyntaxError) {
      return fs.readFileSync(envVar, 'utf8')
    }
    throw error
  }
  return fileNames.map(name => fs.readFileSync(name, 'utf8'))
}
|
||||
/**
 * Parse a numeric environment variable.
 *
 * @param {string|undefined} env - raw environment value
 * @returns {number|undefined} the parsed number, or undefined when unset/empty
 */
function numFromEnv(env) {
  if (!env) return undefined
  return Number(env)
}
|
||||
/**
 * Parse a boolean environment variable.
 *
 * @param {string|boolean|undefined|null} env - raw environment value
 * @returns {boolean|undefined} true/false for the (case-insensitive) strings
 *   'true'/'false', the value itself for an actual boolean, or undefined
 *   when the variable is unset
 * @throws {Error} when the value cannot be interpreted as a boolean
 */
function boolFromEnv(env) {
  if (env === undefined || env === null) return undefined
  // pass real booleans through unchanged (e.g. values injected from config)
  if (typeof env === 'boolean') return env
  if (typeof env === "string") {
    const envLower = env.toLowerCase()
    if (envLower === 'true') return true
    if (envLower === 'false') return false
  }
  // fixed typo in the error message ("envirionment" -> "environment")
  throw new Error("Invalid value for boolean environment variable")
}
|
||||
|
||||
/**
 * Split a full name at its last space: everything before it becomes the
 * first name(s), the final word becomes the last name.
 * A single-word name yields an empty first-name part.
 *
 * @param {string} fullName
 * @returns {[string, string]} [firstNames, lastName]
 */
function splitFullName(fullName) {
  const trimmed = fullName.trim()
  const splitAt = trimmed.lastIndexOf(' ')
  const firstNames = trimmed.substring(0, splitAt).trim()
  const lastName = trimmed.substring(splitAt + 1).trim()
  return [firstNames, lastName]
}
|
||||
|
||||
export {
|
||||
readFilesContentFromEnv,
|
||||
numFromEnv,
|
||||
boolFromEnv,
|
||||
splitFullName,
|
||||
}
|
|
@ -8,7 +8,6 @@ import _ from 'lodash'
|
|||
import ProjectGetter from '../../../../../app/src/Features/Project/ProjectGetter.js'
|
||||
import User from '../../../../../test/acceptance/src/helpers/User.mjs'
|
||||
import MockDocUpdaterApiClass from '../../../../../test/acceptance/src/mocks/MockDocUpdaterApi.mjs'
|
||||
import Features from '../../../../../app/src/infrastructure/Features.js'
|
||||
|
||||
const { ObjectId } = mongodb
|
||||
|
||||
|
@ -188,32 +187,25 @@ describe('ProjectStructureChanges', function () {
|
|||
const cases = [
|
||||
{
|
||||
label: 'with filestore disabled and project-history-blobs enabled',
|
||||
disableFilestore: true,
|
||||
enableProjectHistoryBlobs: true,
|
||||
filestoreMigrationLevel: 2,
|
||||
},
|
||||
{
|
||||
label: 'with filestore enabled and project-history-blobs enabled',
|
||||
disableFilestore: false,
|
||||
enableProjectHistoryBlobs: true,
|
||||
filestoreMigrationLevel: 1,
|
||||
},
|
||||
{
|
||||
label: 'with filestore enabled and project-history-blobs disabled',
|
||||
disableFilestore: false,
|
||||
enableProjectHistoryBlobs: false,
|
||||
filestoreMigrationLevel: 0,
|
||||
},
|
||||
]
|
||||
for (const { label, disableFilestore, enableProjectHistoryBlobs } of cases) {
|
||||
for (const { label, filestoreMigrationLevel } of cases) {
|
||||
describe(label, function () {
|
||||
const previousDisableFilestore = Settings.disableFilestore
|
||||
const previousEnableProjectHistoryBlobs =
|
||||
Settings.enableProjectHistoryBlobs
|
||||
const previousFilestoreMigrationLevel = Settings.filestoreMigrationLevel
|
||||
beforeEach(function () {
|
||||
Settings.disableFilestore = disableFilestore
|
||||
Settings.enableProjectHistoryBlobs = enableProjectHistoryBlobs
|
||||
Settings.filestoreMigrationLevel = filestoreMigrationLevel
|
||||
})
|
||||
afterEach(function () {
|
||||
Settings.disableFilestore = previousDisableFilestore
|
||||
Settings.enableProjectHistoryBlobs = previousEnableProjectHistoryBlobs
|
||||
Settings.filestoreMigrationLevel = previousFilestoreMigrationLevel
|
||||
})
|
||||
|
||||
describe('creating a project from the example template', function () {
|
||||
|
@ -244,7 +236,7 @@ describe('ProjectStructureChanges', function () {
|
|||
expect(updates[2].type).to.equal('add-file')
|
||||
expect(updates[2].userId).to.equal(owner._id)
|
||||
expect(updates[2].pathname).to.equal('/frog.jpg')
|
||||
if (disableFilestore) {
|
||||
if (filestoreMigrationLevel === 2) {
|
||||
expect(updates[2].url).to.not.exist
|
||||
expect(updates[2].createdBlob).to.be.true
|
||||
} else {
|
||||
|
@ -301,10 +293,10 @@ describe('ProjectStructureChanges', function () {
|
|||
expect(updates[2].type).to.equal('add-file')
|
||||
expect(updates[2].userId).to.equal(owner._id)
|
||||
expect(updates[2].pathname).to.equal('/frog.jpg')
|
||||
if (disableFilestore) {
|
||||
if (filestoreMigrationLevel === 2) {
|
||||
expect(updates[2].url).to.not.exist
|
||||
expect(updates[2].createdBlob).to.be.true
|
||||
} else if (Features.hasFeature('project-history-blobs')) {
|
||||
} else if (filestoreMigrationLevel === 1) {
|
||||
expect(updates[2].url).to.be.null
|
||||
} else {
|
||||
expect(updates[2].url).to.be.a('string')
|
||||
|
@ -378,7 +370,7 @@ describe('ProjectStructureChanges', function () {
|
|||
expect(updates[1].type).to.equal('add-file')
|
||||
expect(updates[1].userId).to.equal(owner._id)
|
||||
expect(updates[1].pathname).to.equal('/1pixel.png')
|
||||
if (disableFilestore) {
|
||||
if (filestoreMigrationLevel === 2) {
|
||||
expect(updates[1].url).to.not.exist
|
||||
expect(updates[1].createdBlob).to.be.true
|
||||
} else {
|
||||
|
@ -478,7 +470,7 @@ describe('ProjectStructureChanges', function () {
|
|||
expect(update.type).to.equal('add-file')
|
||||
expect(update.userId).to.equal(owner._id)
|
||||
expect(update.pathname).to.equal('/1pixel.png')
|
||||
if (disableFilestore) {
|
||||
if (filestoreMigrationLevel === 2) {
|
||||
expect(update.url).to.not.exist
|
||||
expect(update.createdBlob).to.be.true
|
||||
} else {
|
||||
|
@ -516,7 +508,7 @@ describe('ProjectStructureChanges', function () {
|
|||
expect(updates[1].type).to.equal('add-file')
|
||||
expect(updates[1].userId).to.equal(owner._id)
|
||||
expect(updates[1].pathname).to.equal('/1pixel.png')
|
||||
if (disableFilestore) {
|
||||
if (filestoreMigrationLevel === 2) {
|
||||
expect(updates[1].url).to.not.exist
|
||||
expect(updates[1].createdBlob).to.be.true
|
||||
} else {
|
||||
|
@ -1005,7 +997,7 @@ describe('ProjectStructureChanges', function () {
|
|||
expect(update.type).to.equal('add-file')
|
||||
expect(update.userId).to.equal(owner._id)
|
||||
expect(update.pathname).to.equal('/1pixel.png')
|
||||
if (disableFilestore) {
|
||||
if (filestoreMigrationLevel === 2) {
|
||||
expect(update.url).to.not.exist
|
||||
expect(update.createdBlob).to.be.true
|
||||
} else {
|
||||
|
@ -1068,7 +1060,7 @@ describe('ProjectStructureChanges', function () {
|
|||
expect(updates[1].type).to.equal('add-file')
|
||||
expect(updates[1].userId).to.equal(owner._id)
|
||||
expect(updates[1].pathname).to.equal('/1pixel.png')
|
||||
if (disableFilestore) {
|
||||
if (filestoreMigrationLevel === 2) {
|
||||
expect(updates[1].url).to.not.exist
|
||||
expect(updates[1].createdBlob).to.be.true
|
||||
} else {
|
||||
|
|
|
@ -154,7 +154,8 @@ function registerExternalAuthAdmin(authMethod) {
|
|||
await User.updateOne(
|
||||
{ _id: user._id },
|
||||
{
|
||||
$set: { isAdmin: true, emails: [{ email, reversedHostname }] },
|
||||
$set: { isAdmin: true, emails: [{ email, reversedHostname, 'confirmedAt' : Date.now() }] },
|
||||
$unset: { 'hashedPassword': "" }, // external-auth user must not have a hashedPassword
|
||||
}
|
||||
).exec()
|
||||
} catch (err) {
|
||||
|
|
|
@ -29,7 +29,7 @@ block vars
|
|||
|
||||
block append meta
|
||||
meta(name='ol-adminUserExists' data-type='boolean' content=adminUserExists)
|
||||
meta(name='ol-ideJsPath' content=buildJsPath('ide.js'))
|
||||
meta(name='ol-ideJsPath' content=buildJsPath('ide-detached.js'))
|
||||
|
||||
block content
|
||||
script(
|
||||
|
@ -122,6 +122,42 @@ block content
|
|||
span(data-ol-inflight='idle') #{translate("register")}
|
||||
span(hidden data-ol-inflight='pending') #{translate("registering")}…
|
||||
|
||||
h3 #{translate('local_account')}
|
||||
p
|
||||
| #{translate('alternatively_create_local_admin_account')}
|
||||
|
||||
form(
|
||||
data-ol-async-form
|
||||
data-ol-register-admin
|
||||
action='/launchpad/register_admin'
|
||||
method='POST'
|
||||
)
|
||||
input(name='_csrf', type='hidden', value=csrfToken)
|
||||
+formMessages()
|
||||
.form-group
|
||||
label(for='email') #{translate("email")}
|
||||
input.form-control(
|
||||
type='email'
|
||||
name='email'
|
||||
placeholder='email@example.com'
|
||||
autocomplete='username'
|
||||
required
|
||||
autofocus='true'
|
||||
)
|
||||
.form-group
|
||||
label(for='password') #{translate("password")}
|
||||
input.form-control#passwordField(
|
||||
type='password'
|
||||
name='password'
|
||||
placeholder='********'
|
||||
autocomplete='new-password'
|
||||
required
|
||||
)
|
||||
.actions
|
||||
button.btn-primary.btn(type='submit' data-ol-disabled-inflight)
|
||||
span(data-ol-inflight="idle") #{translate("register")}
|
||||
span(hidden data-ol-inflight="pending") #{translate("registering")}…
|
||||
|
||||
// Saml Form
|
||||
if authMethod === 'saml'
|
||||
h3 #{translate('saml')}
|
||||
|
@ -140,12 +176,47 @@ block content
|
|||
label(for='email') #{translate("email")}
|
||||
input.form-control(
|
||||
name='email'
|
||||
type='email'
|
||||
placeholder='email@example.com'
|
||||
autocomplete='username'
|
||||
required
|
||||
autofocus='true'
|
||||
)
|
||||
.actions
|
||||
button.btn-primary.btn(type='submit' data-ol-disabled-inflight)
|
||||
span(data-ol-inflight="idle") #{translate("register")}
|
||||
span(hidden data-ol-inflight="pending") #{translate("registering")}…
|
||||
|
||||
h3 #{translate('local_account')}
|
||||
p
|
||||
| #{translate('alternatively_create_local_admin_account')}
|
||||
|
||||
form(
|
||||
data-ol-async-form
|
||||
data-ol-register-admin
|
||||
action='/launchpad/register_admin'
|
||||
method='POST'
|
||||
)
|
||||
input(name='_csrf', type='hidden', value=csrfToken)
|
||||
+formMessages()
|
||||
.form-group
|
||||
label(for='email') #{translate("email")}
|
||||
input.form-control(
|
||||
type='email'
|
||||
name='email'
|
||||
placeholder='email@example.com'
|
||||
autocomplete='username'
|
||||
required
|
||||
autofocus='true'
|
||||
)
|
||||
.form-group
|
||||
label(for='password') #{translate("password")}
|
||||
input.form-control#passwordField(
|
||||
type='password'
|
||||
name='password'
|
||||
placeholder='********'
|
||||
autocomplete='new-password'
|
||||
required
|
||||
)
|
||||
.actions
|
||||
button.btn-primary.btn(type='submit' data-ol-disabled-inflight)
|
||||
span(data-ol-inflight='idle') #{translate("register")}
|
||||
|
|
|
@ -162,6 +162,7 @@
|
|||
"passport-ldapauth": "^2.1.4",
|
||||
"passport-local": "^1.0.0",
|
||||
"passport-oauth2": "^1.5.0",
|
||||
"passport-openidconnect": "^0.1.2",
|
||||
"passport-orcid": "0.0.4",
|
||||
"pug": "^3.0.3",
|
||||
"pug-runtime": "^3.0.1",
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue