Add files via upload

This commit is contained in:
David Rotermund 2024-07-20 12:15:55 +02:00 committed by GitHub
parent 50de000228
commit 5b2a2e61df
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 2039 additions and 6 deletions

View file

@ -1,7 +1,7 @@
# docker network create overleaf-network # docker network create overleaf-network
services: services:
overleafserver: overleafserver:
image: "sharelatex/sharelatex" image: "sharelatex/sharelatex:5.0.7"
container_name: overleafserver container_name: overleafserver
hostname: overleafserver hostname: overleafserver
restart: always restart: always
@ -11,6 +11,13 @@ services:
- overleaf_tex2024:/usr/local/texlive/2023 - overleaf_tex2024:/usr/local/texlive/2023
- /var/run/docker.sock:/var/run/docker.sock - /var/run/docker.sock:/var/run/docker.sock
- /root/overleafserver/UserRegistrationHandler.js:/overleaf/services/web/app/src/Features/User/UserRegistrationHandler.js - /root/overleafserver/UserRegistrationHandler.js:/overleaf/services/web/app/src/Features/User/UserRegistrationHandler.js
- /root/overleafserver/services/web/app/src/Features/Project/ProjectEditorHandler.js:/overleaf/services/web/app/src/Features/Project/ProjectEditorHandler.js
- /root/overleafserver/services/web/modules/track-changes/app/src/TrackChangesController.js:/overleaf/services/web/modules/track-changes/app/src/TrackChangesController.js
- /root/overleafserver/services/web/modules/track-changes/app/src/TrackChangesRouter.js:/overleaf/services/web/modules/track-changes/app/src/TrackChangesRouter.js
- /root/overleafserver/services/web/modules/track-changes/index.js:/overleaf/services/web/modules/track-changes/index.js
- /root/overleafserver/services/web/config/settings.defaults.js:/overleaf/services/web/config/settings.defaults.js
- /root/overleafserver/services/web/app/src/Features/Chat/ChatManager.js:/overleaf/services/web/app/src/Features/Chat/ChatManager.js
- /root/overleafserver/services/web/app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js:/overleaf/services/web/app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js
ports: ports:
- 11001:80 - 11001:80
@ -31,18 +38,20 @@ services:
EMAIL_CONFIRMATION_DISABLED: "false" EMAIL_CONFIRMATION_DISABLED: "false"
OVERLEAF_BEHIND_PROXY: "true" OVERLEAF_BEHIND_PROXY: "true"
OVERLEAF_SECURE_COOKIE: "true" OVERLEAF_SECURE_COOKIE: "true"
OVERLEAF_SITE_URL: "https://overleaf.neuro.uni-bremen.de" OVERLEAF_SITE_URL: "https://psintern.neuro.uni-bremen.de"
OVERLEAF_NAV_TITLE: "FB1 Overleaf" OVERLEAF_NAV_TITLE: "FB1 Overleaf"
OVERLEAF_ADMIN_EMAIL: "overleaf@uni-bremen.de" OVERLEAF_ADMIN_EMAIL: "psintern@neuro.uni-bremen.de"
OVERLEAF_EMAIL_FROM_ADDRESS: "overleaf@uni-bremen.de" OVERLEAF_EMAIL_FROM_ADDRESS: "psintern@neuro.uni-bremen.de"
OVERLEAF_EMAIL_SMTP_HOST: "smtp.uni-bremen.de" OVERLEAF_EMAIL_SMTP_HOST: "mailhost.neurotec.uni-bremen.de"
OVERLEAF_EMAIL_SMTP_PORT: "465" OVERLEAF_EMAIL_SMTP_PORT: "465"
OVERLEAF_EMAIL_SMTP_SECURE: "true" OVERLEAF_EMAIL_SMTP_SECURE: "true"
OVERLEAF_EMAIL_SMTP_USER: "overleaf" OVERLEAF_EMAIL_SMTP_USER: "psintern"
OVERLEAF_EMAIL_SMTP_PASS: ${EMAIL_PASS} OVERLEAF_EMAIL_SMTP_PASS: ${EMAIL_PASS}
OVERLEAF_EMAIL_SMTP_LOGGER: "true" OVERLEAF_EMAIL_SMTP_LOGGER: "true"
OVERLEAF_CUSTOM_EMAIL_FOOTER: "This system is run by the ITP" OVERLEAF_CUSTOM_EMAIL_FOOTER: "This system is run by the ITP"
OVERLEAF_EMAIL_SMTP_IGNORE_TLS: "true"
OVERLEAF_EMAIL_SMTP_TLS_REJECT_UNAUTH: "false"
networks: networks:
- overleaf-network - overleaf-network

View file

@ -0,0 +1,61 @@
const async = require('async')
const UserInfoManager = require('../User/UserInfoManager')
const UserInfoController = require('../User/UserInfoController')
const { promisify } = require('@overleaf/promise-utils')
/**
 * Attach user info to every message (and every resolved thread) in the
 * given set of chat threads.
 *
 * @param {Object} threads - map of threadId -> thread; each thread has a
 *   `messages` array and, when resolved, a `resolved_by_user_id` field
 * @param {Function} callback - called with (error, threads); the same
 *   `threads` object is returned, mutated in place
 */
function injectUserInfoIntoThreads(threads, callback) {
  if (callback == null) {
    callback = function () {}
  }
  // There will be a lot of repetition of user_ids, so first collect the
  // unique ones so each user is looked up in the db only once.
  const userIds = {}
  for (const threadId in threads) {
    const thread = threads[threadId]
    if (thread.resolved) {
      userIds[thread.resolved_by_user_id] = true
    }
    for (const message of thread.messages) {
      userIds[message.user_id] = true
    }
  }
  const users = {}
  const jobs = Object.keys(userIds).map(userId => cb =>
    UserInfoManager.getPersonalInfo(userId, function (error, user) {
      if (error != null) return cb(error)
      users[userId] = UserInfoController.formatPersonalInfo(user)
      cb()
    })
  )
  return async.series(jobs, function (error) {
    if (error != null) {
      return callback(error)
    }
    // Second pass: attach the formatted user objects to the threads.
    for (const threadId in threads) {
      const thread = threads[threadId]
      if (thread.resolved) {
        thread.resolved_by_user = users[thread.resolved_by_user_id]
      }
      for (const message of thread.messages) {
        message.user = users[message.user_id]
      }
    }
    return callback(null, threads)
  })
}
// Export the callback-style API plus a promisified variant for async/await
// callers.
module.exports = {
  injectUserInfoIntoThreads,
  promises: {
    injectUserInfoIntoThreads: promisify(injectUserInfoIntoThreads),
  },
}

View file

@ -0,0 +1,530 @@
const request = require('request').defaults({ timeout: 30 * 1000 })
const OError = require('@overleaf/o-error')
const settings = require('@overleaf/settings')
const _ = require('lodash')
const async = require('async')
const logger = require('@overleaf/logger')
const metrics = require('@overleaf/metrics')
const { promisify } = require('util')
const { promisifyMultiResult } = require('@overleaf/promise-utils')
const ProjectGetter = require('../Project/ProjectGetter')
/**
 * Ask docupdater to flush a project's in-memory (redis) state to mongo.
 *
 * @param {string} projectId
 * @param {Function} callback - called with (error, body)
 */
function flushProjectToMongo(projectId, callback) {
  const opts = { method: 'POST', path: `/project/${projectId}/flush` }
  _makeRequest(opts, projectId, 'flushing.mongo.project', callback)
}
/**
 * Flush several projects to mongo, one after the other (serial, not
 * parallel).
 *
 * @param {string[]} projectIds
 * @param {Function} callback
 */
function flushMultipleProjectsToMongo(projectIds, callback) {
  const jobs = []
  for (const projectId of projectIds) {
    jobs.push(cb => flushProjectToMongo(projectId, cb))
  }
  async.series(jobs, callback)
}
/**
 * Flush a project to mongo and remove it from docupdater's memory.
 *
 * @param {string} projectId
 * @param {Function} callback
 */
function flushProjectToMongoAndDelete(projectId, callback) {
  const opts = { method: 'DELETE', path: `/project/${projectId}` }
  _makeRequest(opts, projectId, 'flushing.mongo.project', callback)
}
/**
 * Flush a single doc's in-memory state to mongo.
 *
 * @param {string} projectId
 * @param {string} docId
 * @param {Function} callback
 */
function flushDocToMongo(projectId, docId, callback) {
  const opts = {
    method: 'POST',
    path: `/project/${projectId}/doc/${docId}/flush`,
  }
  _makeRequest(opts, projectId, 'flushing.mongo.doc', callback)
}
/**
 * Delete a doc in docupdater.
 *
 * @param {string} projectId
 * @param {string} docId
 * @param {boolean|Function} ignoreFlushErrors - optional; when the 3-argument
 *   form is used this position holds the callback instead
 * @param {Function} callback
 */
function deleteDoc(projectId, docId, ignoreFlushErrors, callback) {
  // Support the legacy 3-argument call signature.
  if (typeof ignoreFlushErrors === 'function') {
    callback = ignoreFlushErrors
    ignoreFlushErrors = false
  }
  const basePath = `/project/${projectId}/doc/${docId}`
  const path = ignoreFlushErrors
    ? `${basePath}?ignore_flush_errors=true`
    : basePath
  _makeRequest({ method: 'DELETE', path }, projectId, 'delete.mongo.doc', callback)
}
/**
 * Fetch a doc from docupdater, including any ops since `fromVersion`.
 *
 * @param {string} projectId
 * @param {string} docId
 * @param {number} fromVersion - version to return ops from
 * @param {Function} callback - called with (error, lines, version, ranges, ops)
 */
function getDocument(projectId, docId, fromVersion, callback) {
  const opts = {
    json: true,
    path: `/project/${projectId}/doc/${docId}?fromVersion=${fromVersion}`,
  }
  _makeRequest(opts, projectId, 'get-document', (error, doc) => {
    if (error) {
      return callback(error)
    }
    // Unpack the response body into positional callback arguments.
    callback(null, doc.lines, doc.version, doc.ranges, doc.ops)
  })
}
/**
 * Overwrite a doc's content in docupdater.
 *
 * @param {string} projectId
 * @param {string} docId
 * @param {string} userId - attributed author of the change
 * @param {string[]} docLines - new document content
 * @param {string} source - origin of the update (for auditing)
 * @param {Function} callback
 */
function setDocument(projectId, docId, userId, docLines, source, callback) {
  const payload = { lines: docLines, source, user_id: userId }
  const opts = {
    method: 'POST',
    path: `/project/${projectId}/doc/${docId}`,
    json: payload,
  }
  _makeRequest(opts, projectId, 'set-document', callback)
}
/**
 * Fetch all of a project's docs from docupdater, but only if the project
 * state hash still matches `projectStateHash`.
 *
 * @param {string} projectId
 * @param {string} projectStateHash - state hash the caller last saw
 * @param {Function} callback - called with (error, docs); `docs` is
 *   undefined when the state hash no longer matches (409 from docupdater)
 */
function getProjectDocsIfMatch(projectId, projectStateHash, callback) {
  // If the project state hasn't changed, we can get all the latest
  // docs from redis via the docupdater. Otherwise we will need to
  // fall back to getting them from mongo.
  const timer = new metrics.Timer('get-project-docs')
  const url = `${settings.apis.documentupdater.url}/project/${projectId}/get_and_flush_if_old?state=${projectStateHash}`
  request.post(url, function (error, res, body) {
    timer.done()
    if (error) {
      // Network-level failure: tag with context and pass through.
      OError.tag(error, 'error getting project docs from doc updater', {
        url,
        projectId,
      })
      return callback(error)
    }
    if (res.statusCode === 409) {
      // HTTP response code "409 Conflict"
      // Docupdater has checked the projectStateHash and found that
      // it has changed. This means that the docs currently in redis
      // aren't the only change to the project and the full set of
      // docs/files should be retrieved from docstore/filestore
      // instead. Signalled to the caller by calling back with no docs.
      callback()
    } else if (res.statusCode >= 200 && res.statusCode < 300) {
      let docs
      try {
        // Body is raw text here (no `json: true` on the request).
        docs = JSON.parse(body)
      } catch (error1) {
        return callback(OError.tag(error1))
      }
      callback(null, docs)
    } else {
      callback(
        new OError(
          `doc updater returned a non-success status code: ${res.statusCode}`,
          {
            projectId,
            url,
          }
        )
      )
    }
  })
}
/**
 * Clear docupdater's cached state hash for a project.
 *
 * @param {string} projectId
 * @param {Function} callback
 */
function clearProjectState(projectId, callback) {
  const opts = { method: 'POST', path: `/project/${projectId}/clearState` }
  _makeRequest(opts, projectId, 'clear-project-state', callback)
}
/**
 * Accept a set of tracked changes on a doc.
 *
 * @param {string} projectId
 * @param {string} docId
 * @param {string[]} changeIds - ids of the changes to accept
 * @param {Function} callback
 */
function acceptChanges(projectId, docId, changeIds, callback) {
  const opts = {
    method: 'POST',
    path: `/project/${projectId}/doc/${docId}/change/accept`,
    json: { change_ids: changeIds },
  }
  _makeRequest(opts, projectId, 'accept-changes', callback)
}
/**
 * Mark a comment thread as resolved.
 *
 * @param {string} projectId
 * @param {string} docId
 * @param {string} threadId
 * @param {string} userId - user performing the resolve
 * @param {Function} callback
 */
function resolveThread(projectId, docId, threadId, userId, callback) {
  const opts = {
    method: 'POST',
    path: `/project/${projectId}/doc/${docId}/comment/${threadId}/resolve`,
    json: { user_id: userId },
  }
  _makeRequest(opts, projectId, 'resolve-thread', callback)
}
/**
 * Reopen a previously resolved comment thread.
 *
 * @param {string} projectId
 * @param {string} docId
 * @param {string} threadId
 * @param {string} userId - user performing the reopen
 * @param {Function} callback
 */
function reopenThread(projectId, docId, threadId, userId, callback) {
  const opts = {
    method: 'POST',
    path: `/project/${projectId}/doc/${docId}/comment/${threadId}/reopen`,
    json: { user_id: userId },
  }
  _makeRequest(opts, projectId, 'reopen-thread', callback)
}
/**
 * Delete a comment thread from a doc.
 *
 * @param {string} projectId
 * @param {string} docId
 * @param {string} threadId
 * @param {string} userId - user performing the delete
 * @param {Function} callback
 */
function deleteThread(projectId, docId, threadId, userId, callback) {
  const opts = {
    method: 'DELETE',
    path: `/project/${projectId}/doc/${docId}/comment/${threadId}`,
    json: { user_id: userId },
  }
  _makeRequest(opts, projectId, 'delete-thread', callback)
}
/**
 * Ask docupdater to resync the project's history against the given set of
 * docs and files.
 *
 * @param {string} projectId
 * @param {string} projectHistoryId
 * @param {Array} docs
 * @param {Array} files
 * @param {Object} opts - may contain `historyRangesMigration`
 * @param {Function} callback
 */
function resyncProjectHistory(
  projectId,
  projectHistoryId,
  docs,
  files,
  opts,
  callback
) {
  const body = { docs, files, projectHistoryId }
  if (opts.historyRangesMigration) {
    body.historyRangesMigration = opts.historyRangesMigration
  }
  const requestOpts = {
    method: 'POST',
    path: `/project/${projectId}/history/resync`,
    json: body,
    timeout: 6 * 60 * 1000, // allow 6 minutes for resync
  }
  _makeRequest(requestOpts, projectId, 'resync-project-history', callback)
}
/**
 * Block a project from being loaded in docupdater
 *
 * @param {string} projectId
 * @param {Callback} callback - called with (error, blocked)
 */
function blockProject(projectId, callback) {
  const opts = { json: true, method: 'POST', path: `/project/${projectId}/block` }
  _makeRequest(opts, projectId, 'block-project', (err, body) => {
    if (err) {
      return callback(err)
    }
    // Response body reports whether the block took effect.
    callback(null, body.blocked)
  })
}
/**
 * Unblock a previously blocked project
 *
 * @param {string} projectId
 * @param {Callback} callback - called with (error, wasBlocked)
 */
function unblockProject(projectId, callback) {
  const opts = {
    json: true,
    method: 'POST',
    path: `/project/${projectId}/unblock`,
  }
  _makeRequest(opts, projectId, 'unblock-project', (err, body) => {
    if (err) {
      return callback(err)
    }
    // Response body reports whether the project had been blocked.
    callback(null, body.wasBlocked)
  })
}
/**
 * Send project structure changes (doc/file adds, deletes and renames) to
 * docupdater so they are recorded in project history.
 *
 * @param {string} projectId
 * @param {string} projectHistoryId
 * @param {string} userId - attributed author of the changes
 * @param {Object} changes - holds oldDocs/newDocs, oldFiles/newFiles and
 *   newProject (whose `version` is required when there are updates)
 * @param {string} source - origin of the update
 * @param {Function} callback
 */
function updateProjectStructure(
  projectId,
  projectHistoryId,
  userId,
  changes,
  source,
  callback
) {
  // No-op unless project-history structure ops are enabled in settings.
  if (
    settings.apis.project_history == null ||
    !settings.apis.project_history.sendProjectStructureOps
  ) {
    return callback()
  }
  ProjectGetter.getProjectWithoutLock(
    projectId,
    { overleaf: true },
    (err, project) => {
      if (err) {
        return callback(err)
      }
      const historyRangesSupport = _.get(
        project,
        'overleaf.history.rangesSupportEnabled',
        false
      )
      const {
        deletes: docDeletes,
        adds: docAdds,
        renames: docRenames,
      } = _getUpdates(
        'doc',
        changes.oldDocs,
        changes.newDocs,
        historyRangesSupport
      )
      const {
        deletes: fileDeletes,
        adds: fileAdds,
        renames: fileRenames,
      } = _getUpdates(
        'file',
        changes.oldFiles,
        changes.newFiles,
        historyRangesSupport
      )
      // Deletes go first so paths freed by deletions can be reused by the
      // adds/renames that follow (1:1 path-to-id mapping; see _getUpdates).
      const updates = [].concat(
        docDeletes,
        fileDeletes,
        docAdds,
        fileAdds,
        docRenames,
        fileRenames
      )
      const projectVersion =
        changes && changes.newProject && changes.newProject.version
      if (updates.length < 1) {
        return callback()
      }
      // A project version is mandatory once there is anything to send.
      if (projectVersion == null) {
        logger.warn(
          { projectId, changes, projectVersion },
          'did not receive project version in changes'
        )
        return callback(new Error('did not receive project version in changes'))
      }
      _makeRequest(
        {
          path: `/project/${projectId}`,
          json: {
            updates,
            userId,
            version: projectVersion,
            projectHistoryId,
            source,
          },
          method: 'POST',
        },
        projectId,
        'update-project-structure',
        callback
      )
    }
  )
}
/**
 * Shared HTTP helper for all docupdater calls: builds the URL from settings,
 * times the request under `metricsKey`, and normalises error handling.
 *
 * @param {Object} options - { path, json, method (default GET), timeout (default 30s) }
 * @param {string} projectId - used for log context only
 * @param {string} metricsKey - metrics timer name
 * @param {Function} callback - called with (error, body)
 */
function _makeRequest(options, projectId, metricsKey, callback) {
  const timer = new metrics.Timer(metricsKey)
  request(
    {
      url: `${settings.apis.documentupdater.url}${options.path}`,
      json: options.json,
      method: options.method || 'GET',
      timeout: options.timeout || 30 * 1000,
    },
    function (error, res, body) {
      timer.done()
      if (error) {
        // Transport-level failure (timeout, connection refused, ...).
        logger.warn(
          { error, projectId },
          'error making request to document updater'
        )
        callback(error)
      } else if (res.statusCode >= 200 && res.statusCode < 300) {
        callback(null, body)
      } else {
        // Non-2xx response: synthesize an error for the caller.
        error = new Error(
          `document updater returned a failure status code: ${res.statusCode}`
        )
        logger.warn(
          { error, projectId },
          `document updater returned failure status code: ${res.statusCode}`
        )
        callback(error)
      }
    }
  )
}
/**
 * Diff two entity lists (docs or files) into history update operations.
 *
 * @param {string} entityType - 'doc' or 'file'; also keys into each entry
 * @param {Array} oldEntities - entries of shape { [entityType]: {_id}, path, ... }
 * @param {Array} newEntities - same shape as oldEntities
 * @param {boolean} historyRangesSupport - forwarded on add operations
 * @returns {{deletes: Array, adds: Array, renames: Array}}
 */
function _getUpdates(
  entityType,
  oldEntities,
  newEntities,
  historyRangesSupport
) {
  if (!oldEntities) {
    oldEntities = []
  }
  if (!newEntities) {
    newEntities = []
  }
  const deletes = []
  const adds = []
  const renames = []
  // Index both sides by stringified entity id for O(1) lookups.
  const oldEntitiesHash = _.keyBy(oldEntities, entity =>
    entity[entityType]._id.toString()
  )
  const newEntitiesHash = _.keyBy(newEntities, entity =>
    entity[entityType]._id.toString()
  )
  // Send deletes before adds (and renames) to keep a 1:1 mapping between
  // paths and ids
  //
  // When a file is replaced, we first delete the old file and then add the
  // new file. If the 'add' operation is sent to project history before the
  // 'delete' then we would have two files with the same path at that point
  // in time.
  for (const id in oldEntitiesHash) {
    const oldEntity = oldEntitiesHash[id]
    const newEntity = newEntitiesHash[id]
    if (newEntity == null) {
      // entity deleted - expressed as a rename to the empty pathname
      deletes.push({
        type: `rename-${entityType}`,
        id,
        pathname: oldEntity.path,
        newPathname: '',
      })
    }
  }
  for (const id in newEntitiesHash) {
    const newEntity = newEntitiesHash[id]
    const oldEntity = oldEntitiesHash[id]
    if (oldEntity == null) {
      // entity added
      adds.push({
        type: `add-${entityType}`,
        id,
        pathname: newEntity.path,
        docLines: newEntity.docLines,
        ranges: newEntity.ranges,
        historyRangesSupport,
        url: newEntity.url,
        hash: newEntity.file != null ? newEntity.file.hash : undefined,
      })
    } else if (newEntity.path !== oldEntity.path) {
      // entity renamed
      renames.push({
        type: `rename-${entityType}`,
        id,
        pathname: oldEntity.path,
        newPathname: newEntity.path,
      })
    }
  }
  return { deletes, adds, renames }
}
// Export the callback-style API, plus a `promises` namespace for
// async/await callers. getDocument uses promisifyMultiResult because its
// callback yields multiple positional values (lines, version, ranges, ops).
module.exports = {
  flushProjectToMongo,
  flushMultipleProjectsToMongo,
  flushProjectToMongoAndDelete,
  flushDocToMongo,
  deleteDoc,
  getDocument,
  setDocument,
  getProjectDocsIfMatch,
  clearProjectState,
  acceptChanges,
  resolveThread,
  reopenThread,
  deleteThread,
  resyncProjectHistory,
  blockProject,
  unblockProject,
  updateProjectStructure,
  promises: {
    flushProjectToMongo: promisify(flushProjectToMongo),
    flushMultipleProjectsToMongo: promisify(flushMultipleProjectsToMongo),
    flushProjectToMongoAndDelete: promisify(flushProjectToMongoAndDelete),
    flushDocToMongo: promisify(flushDocToMongo),
    deleteDoc: promisify(deleteDoc),
    getDocument: promisifyMultiResult(getDocument, [
      'lines',
      'version',
      'ranges',
      'ops',
    ]),
    setDocument: promisify(setDocument),
    getProjectDocsIfMatch: promisify(getProjectDocsIfMatch),
    clearProjectState: promisify(clearProjectState),
    acceptChanges: promisify(acceptChanges),
    resolveThread: promisify(resolveThread),
    reopenThread: promisify(reopenThread),
    deleteThread: promisify(deleteThread),
    resyncProjectHistory: promisify(resyncProjectHistory),
    blockProject: promisify(blockProject),
    unblockProject: promisify(unblockProject),
    updateProjectStructure: promisify(updateProjectStructure),
  },
}

View file

@ -0,0 +1,156 @@
let ProjectEditorHandler
const _ = require('lodash')
const Path = require('path')
/**
 * Merge two deleted-doc lists, deduplicating by id; entries from `a` win
 * when the same id appears in both.
 */
function mergeDeletedDocs(a, b) {
  const seen = new Set()
  for (const doc of a) {
    seen.add(doc._id.toString())
  }
  const extras = b.filter(doc => !seen.has(doc._id.toString()))
  return [...a, ...extras]
}
// Builds the project/editor view models sent to the frontend.
module.exports = ProjectEditorHandler = {
  // NOTE(review): hard-coded to true here; presumably patched in this
  // deployment to expose track changes in the editor — confirm against the
  // stock image this file is bind-mounted over.
  trackChangesAvailable: true,
  /**
   * Build the full project view model for the editor.
   * Mutates `project.deletedDocs` in place (normalises it to an array and
   * strips `deletedAt` from each entry).
   *
   * @param {Object} project - project mongo document
   * @param {Array} members - membership records ({ user, privilegeLevel })
   * @param {Array} invites - pending invites (may be null)
   * @param {Array} deletedDocsFromDocstore - merged into project.deletedDocs
   * @returns {Object} view model consumed by the editor frontend
   */
  buildProjectModelView(project, members, invites, deletedDocsFromDocstore) {
    let owner, ownerFeatures
    if (!Array.isArray(project.deletedDocs)) {
      project.deletedDocs = []
    }
    project.deletedDocs.forEach(doc => {
      // The frontend does not use this field.
      delete doc.deletedAt
    })
    const result = {
      _id: project._id,
      name: project.name,
      rootDoc_id: project.rootDoc_id,
      rootFolder: [this.buildFolderModelView(project.rootFolder[0])],
      publicAccesLevel: project.publicAccesLevel,
      dropboxEnabled: !!project.existsInDropbox,
      compiler: project.compiler,
      description: project.description,
      spellCheckLanguage: project.spellCheckLanguage,
      deletedByExternalDataSource: project.deletedByExternalDataSource || false,
      deletedDocs: mergeDeletedDocs(
        project.deletedDocs,
        deletedDocsFromDocstore
      ),
      members: [],
      invites: this.buildInvitesView(invites),
      imageName:
        project.imageName != null
          ? Path.basename(project.imageName)
          : undefined,
    }
    ;({ owner, ownerFeatures, members } =
      this.buildOwnerAndMembersViews(members))
    result.owner = owner
    result.members = members
    // Project features default from the owner's features, with fallbacks.
    result.features = _.defaults(ownerFeatures || {}, {
      collaborators: -1, // Infinite
      versioning: false,
      dropbox: false,
      compileTimeout: 60,
      compileGroup: 'standard',
      templates: false,
      references: false,
      referencesSearch: false,
      mendeley: false,
      trackChanges: false,
      trackChangesVisible: ProjectEditorHandler.trackChangesAvailable,
      symbolPalette: false,
    })
    if (result.features.trackChanges) {
      result.trackChangesState = project.track_changes || false
    }
    // Originally these two feature flags were both signalled by the now-deprecated `references` flag.
    // For older users, the presence of the `references` feature flag should still turn on these features.
    result.features.referencesSearch =
      result.features.referencesSearch || result.features.references
    result.features.mendeley =
      result.features.mendeley || result.features.references
    return result
  },
  /**
   * Split membership records into the owner (plus their features) and the
   * remaining members, each converted to a user view model.
   */
  buildOwnerAndMembersViews(members) {
    let owner = null
    let ownerFeatures = null
    const filteredMembers = []
    for (const member of members || []) {
      if (member.privilegeLevel === 'owner') {
        ownerFeatures = member.user.features
        owner = this.buildUserModelView(member.user, 'owner')
      } else {
        filteredMembers.push(
          this.buildUserModelView(member.user, member.privilegeLevel)
        )
      }
    }
    return {
      owner,
      ownerFeatures,
      members: filteredMembers,
    }
  },
  // Minimal user record exposed to other collaborators.
  buildUserModelView(user, privileges) {
    return {
      _id: user._id,
      first_name: user.first_name,
      last_name: user.last_name,
      email: user.email,
      privileges,
      signUpDate: user.signUpDate,
    }
  },
  // Recursively convert a folder subtree, dropping null file refs.
  buildFolderModelView(folder) {
    const fileRefs = _.filter(folder.fileRefs || [], file => file != null)
    return {
      _id: folder._id,
      name: folder.name,
      folders: (folder.folders || []).map(childFolder =>
        this.buildFolderModelView(childFolder)
      ),
      fileRefs: fileRefs.map(file => this.buildFileModelView(file)),
      docs: (folder.docs || []).map(doc => this.buildDocModelView(doc)),
    }
  },
  buildFileModelView(file) {
    return {
      _id: file._id,
      name: file.name,
      linkedFileData: file.linkedFileData,
      created: file.created,
    }
  },
  buildDocModelView(doc) {
    return {
      _id: doc._id,
      name: doc.name,
    }
  },
  // Whitelist the invite fields the frontend is allowed to see.
  buildInvitesView(invites) {
    if (invites == null) {
      return []
    }
    return invites.map(invite =>
      _.pick(invite, [
        '_id',
        'createdAt',
        'email',
        'expires',
        'privileges',
        'projectId',
        'sendingUserId',
      ])
    )
  },
}

View file

@ -0,0 +1,916 @@
const fs = require('fs')
const Path = require('path')
const { merge } = require('@overleaf/settings/merge')
// Automatically detect module imports that are included in this version of the application (SaaS, Server-CE, Server Pro).
// E.g. during a Server-CE build, we will not find imports for proprietary modules.
//
// Restart webpack after adding/removing modules.
const MODULES_PATH = Path.join(__dirname, '../modules')
const entryPointsIde = []
const entryPointsMain = []
for (const moduleName of fs.readdirSync(MODULES_PATH)) {
  // Each optional module may ship an ide and/or a main frontend entry point.
  const ideEntry = Path.join(MODULES_PATH, moduleName, '/frontend/js/ide/index.js')
  if (fs.existsSync(ideEntry)) {
    entryPointsIde.push(ideEntry)
  }
  const mainEntry = Path.join(MODULES_PATH, moduleName, '/frontend/js/main/index.js')
  if (fs.existsSync(mainEntry)) {
    entryPointsMain.push(mainEntry)
  }
}
// Forward declarations assigned inside the exported settings object below.
let defaultFeatures, siteUrl
// Make time interval config easier.
const seconds = 1000
const minutes = 60 * seconds
// These credentials are used for authenticating api requests
// between services that may need to go over public channels
const httpAuthUser = process.env.WEB_API_USER
const httpAuthPass = process.env.WEB_API_PASSWORD
const httpAuthUsers = {}
if (httpAuthUser && httpAuthPass) {
  httpAuthUsers[httpAuthUser] = httpAuthPass
}
// NOTE(review): the fallback value is insecure; SESSION_SECRET should always
// be set in production deployments.
const sessionSecret = process.env.SESSION_SECRET || 'secret-please-change'
/**
 * Read an integer setting from the environment.
 *
 * @param {string} name - environment variable name
 * @param {number} defaultValue - value used when the variable is unset or
 *   not a valid integer; must itself be a number
 * @returns {number} the parsed value, or defaultValue
 * @throws {Error} when defaultValue is missing or not a number
 */
const intFromEnv = function (name, defaultValue) {
  if (
    [null, undefined].includes(defaultValue) ||
    typeof defaultValue !== 'number'
  ) {
    throw new Error(
      `Bad default integer value for setting: ${name}, ${defaultValue}`
    )
  }
  const parsed = parseInt(process.env[name], 10)
  // Use an explicit NaN check rather than `|| defaultValue`, which would
  // silently discard an explicitly configured value of 0.
  return Number.isNaN(parsed) ? defaultValue : parsed
}
// File extensions treated as editable text documents (as opposed to binary
// files) when uploading or importing projects.
const defaultTextExtensions = [
  'tex',
  'latex',
  'sty',
  'cls',
  'bst',
  'bib',
  'bibtex',
  'txt',
  'tikz',
  'mtx',
  'rtex',
  'md',
  'asy',
  'lbx',
  'bbx',
  'cbx',
  'm',
  'lco',
  'dtx',
  'ins',
  'ist',
  'def',
  'clo',
  'ldf',
  'rmd',
  'lua',
  'gv',
  'mf',
  'yml',
  'yaml',
  'lhs',
  'mk',
  'xmpdata',
  'cfg',
  'rnw',
  'ltx',
  'inc',
]
/**
 * Parse a comma-separated list of file extensions into an array.
 * Whitespace around each entry is trimmed; empty/missing input yields [].
 *
 * @param {string|undefined} extensions - e.g. "tex, bib, txt"
 * @returns {string[]}
 */
const parseTextExtensions = function (extensions) {
  if (!extensions) {
    return []
  }
  return extensions.split(',').map(ext => ext.trim())
}
module.exports = {
env: 'server-ce',
limits: {
httpGlobalAgentMaxSockets: 300,
httpsGlobalAgentMaxSockets: 300,
},
allowAnonymousReadAndWriteSharing:
process.env.OVERLEAF_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING === 'true',
// Databases
// ---------
mongo: {
options: {
appname: 'web',
maxPoolSize: parseInt(process.env.MONGO_POOL_SIZE, 10) || 100,
serverSelectionTimeoutMS:
parseInt(process.env.MONGO_SERVER_SELECTION_TIMEOUT, 10) || 60000,
// Setting socketTimeoutMS to 0 means no timeout
socketTimeoutMS: parseInt(
process.env.MONGO_SOCKET_TIMEOUT ?? '60000',
10
),
monitorCommands: true,
},
url:
process.env.MONGO_CONNECTION_STRING ||
process.env.MONGO_URL ||
`mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`,
hasSecondaries: process.env.MONGO_HAS_SECONDARIES === 'true',
},
redis: {
web: {
host: process.env.REDIS_HOST || 'localhost',
port: process.env.REDIS_PORT || '6379',
password: process.env.REDIS_PASSWORD || '',
db: process.env.REDIS_DB,
maxRetriesPerRequest: parseInt(
process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
),
},
// websessions:
// cluster: [
// {host: 'localhost', port: 7000}
// {host: 'localhost', port: 7001}
// {host: 'localhost', port: 7002}
// {host: 'localhost', port: 7003}
// {host: 'localhost', port: 7004}
// {host: 'localhost', port: 7005}
// ]
// ratelimiter:
// cluster: [
// {host: 'localhost', port: 7000}
// {host: 'localhost', port: 7001}
// {host: 'localhost', port: 7002}
// {host: 'localhost', port: 7003}
// {host: 'localhost', port: 7004}
// {host: 'localhost', port: 7005}
// ]
// cooldown:
// cluster: [
// {host: 'localhost', port: 7000}
// {host: 'localhost', port: 7001}
// {host: 'localhost', port: 7002}
// {host: 'localhost', port: 7003}
// {host: 'localhost', port: 7004}
// {host: 'localhost', port: 7005}
// ]
api: {
host: process.env.REDIS_HOST || 'localhost',
port: process.env.REDIS_PORT || '6379',
password: process.env.REDIS_PASSWORD || '',
maxRetriesPerRequest: parseInt(
process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
),
},
},
// Service locations
// -----------------
// Configure which ports to run each service on. Generally you
// can leave these as they are unless you have some other services
// running which conflict, or want to run the web process on port 80.
internal: {
web: {
port: process.env.WEB_PORT || 3000,
host: process.env.LISTEN_ADDRESS || 'localhost',
},
},
// Tell each service where to find the other services. If everything
// is running locally then this is easy, but they exist as separate config
// options incase you want to run some services on remote hosts.
apis: {
web: {
url: `http://${
process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost'
}:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`,
user: httpAuthUser,
pass: httpAuthPass,
},
documentupdater: {
url: `http://${
process.env.DOCUPDATER_HOST ||
process.env.DOCUMENT_UPDATER_HOST ||
'localhost'
}:3003`,
},
spelling: {
url: `http://${process.env.SPELLING_HOST || 'localhost'}:3005`,
host: process.env.SPELLING_HOST,
},
docstore: {
url: `http://${process.env.DOCSTORE_HOST || 'localhost'}:3016`,
pubUrl: `http://${process.env.DOCSTORE_HOST || 'localhost'}:3016`,
},
chat: {
internal_url: `http://${process.env.CHAT_HOST || 'localhost'}:3010`,
},
filestore: {
url: `http://${process.env.FILESTORE_HOST || 'localhost'}:3009`,
},
clsi: {
url: `http://${process.env.CLSI_HOST || 'localhost'}:3013`,
// url: "http://#{process.env['CLSI_LB_HOST']}:3014"
backendGroupName: undefined,
defaultBackendClass: process.env.CLSI_DEFAULT_BACKEND_CLASS || 'e2',
submissionBackendClass:
process.env.CLSI_SUBMISSION_BACKEND_CLASS || 'n2d',
},
project_history: {
sendProjectStructureOps: true,
url: `http://${process.env.PROJECT_HISTORY_HOST || 'localhost'}:3054`,
},
realTime: {
url: `http://${process.env.REALTIME_HOST || 'localhost'}:3026`,
},
contacts: {
url: `http://${process.env.CONTACTS_HOST || 'localhost'}:3036`,
},
notifications: {
url: `http://${process.env.NOTIFICATIONS_HOST || 'localhost'}:3042`,
},
webpack: {
url: `http://${process.env.WEBPACK_HOST || 'localhost'}:3808`,
},
wiki: {
url: process.env.WIKI_URL || 'https://learn.sharelatex.com',
maxCacheAge: parseInt(process.env.WIKI_MAX_CACHE_AGE || 5 * minutes, 10),
},
haveIBeenPwned: {
enabled: process.env.HAVE_I_BEEN_PWNED_ENABLED === 'true',
url:
process.env.HAVE_I_BEEN_PWNED_URL || 'https://api.pwnedpasswords.com',
timeout: parseInt(process.env.HAVE_I_BEEN_PWNED_TIMEOUT, 10) || 5 * 1000,
},
// For legacy reasons, we need to populate the below objects.
v1: {},
recurly: {},
},
jwt: {
key: process.env.OT_JWT_AUTH_KEY,
algorithm: process.env.OT_JWT_AUTH_ALG || 'HS256',
},
splitTest: {
devToolbar: {
enabled: false,
},
},
splitTests: [],
// Where your instance of Overleaf Community Edition/Server Pro can be found publicly. Used in emails
// that are sent out, generated links, etc.
siteUrl: (siteUrl = process.env.PUBLIC_URL || 'http://localhost:3000'),
lockManager: {
lockTestInterval: intFromEnv('LOCK_MANAGER_LOCK_TEST_INTERVAL', 50),
maxTestInterval: intFromEnv('LOCK_MANAGER_MAX_TEST_INTERVAL', 1000),
maxLockWaitTime: intFromEnv('LOCK_MANAGER_MAX_LOCK_WAIT_TIME', 10000),
redisLockExpiry: intFromEnv('LOCK_MANAGER_REDIS_LOCK_EXPIRY', 30),
slowExecutionThreshold: intFromEnv(
'LOCK_MANAGER_SLOW_EXECUTION_THRESHOLD',
5000
),
},
// Optional separate location for websocket connections, if unset defaults to siteUrl.
wsUrl: process.env.WEBSOCKET_URL,
wsUrlV2: process.env.WEBSOCKET_URL_V2,
wsUrlBeta: process.env.WEBSOCKET_URL_BETA,
wsUrlV2Percentage: parseInt(
process.env.WEBSOCKET_URL_V2_PERCENTAGE || '0',
10
),
wsRetryHandshake: parseInt(process.env.WEBSOCKET_RETRY_HANDSHAKE || '5', 10),
// cookie domain
// use full domain for cookies to only be accessible from that domain,
// replace subdomain with dot to have them accessible on all subdomains
cookieDomain: process.env.COOKIE_DOMAIN,
cookieName: process.env.COOKIE_NAME || 'overleaf.sid',
cookieRollingSession: true,
// this is only used if cookies are used for clsi backend
// clsiCookieKey: "clsiserver"
robotsNoindex: process.env.ROBOTS_NOINDEX === 'true' || false,
maxEntitiesPerProject: 2000,
projectUploadTimeout: parseInt(
process.env.PROJECT_UPLOAD_TIMEOUT || '120000',
10
),
maxUploadSize: 50 * 1024 * 1024, // 50 MB
multerOptions: {
preservePath: process.env.MULTER_PRESERVE_PATH,
},
// start failing the health check if active handles exceeds this limit
maxActiveHandles: process.env.MAX_ACTIVE_HANDLES
? parseInt(process.env.MAX_ACTIVE_HANDLES, 10)
: undefined,
// Security
// --------
security: {
sessionSecret,
bcryptRounds: parseInt(process.env.BCRYPT_ROUNDS, 10) || 12,
}, // number of rounds used to hash user passwords (raised to power 2)
adminUrl: process.env.ADMIN_URL,
adminOnlyLogin: process.env.ADMIN_ONLY_LOGIN === 'true',
adminPrivilegeAvailable: process.env.ADMIN_PRIVILEGE_AVAILABLE === 'true',
blockCrossOriginRequests: process.env.BLOCK_CROSS_ORIGIN_REQUESTS === 'true',
allowedOrigins: (process.env.ALLOWED_ORIGINS || siteUrl).split(','),
httpAuthUsers,
// Default features
// ----------------
//
// You can select the features that are enabled by default for new
// new users.
defaultFeatures: (defaultFeatures = {
collaborators: -1,
dropbox: true,
github: true,
gitBridge: true,
versioning: true,
compileTimeout: 180,
compileGroup: 'standard',
references: true,
templates: true,
trackChanges: true,
}),
// featuresEpoch: 'YYYY-MM-DD',
features: {
personal: defaultFeatures,
},
groupPlanModalOptions: {
plan_codes: [],
currencies: [],
sizes: [],
usages: [],
},
plans: [
{
planCode: 'personal',
name: 'Personal',
price_in_cents: 0,
features: defaultFeatures,
},
],
enableSubscriptions: false,
restrictedCountries: [],
enableOnboardingEmails: process.env.ENABLE_ONBOARDING_EMAILS === 'true',
enabledLinkedFileTypes: (process.env.ENABLED_LINKED_FILE_TYPES || '').split(
','
),
// i18n
// ------
//
i18n: {
checkForHTMLInVars: process.env.I18N_CHECK_FOR_HTML_IN_VARS === 'true',
escapeHTMLInVars: process.env.I18N_ESCAPE_HTML_IN_VARS === 'true',
subdomainLang: {
www: { lngCode: 'en', url: siteUrl },
},
defaultLng: 'en',
},
// Spelling languages
// ------------------
//
// You must have the corresponding aspell package installed to
// be able to use a language.
languages: [
{ code: 'en', name: 'English' },
{ code: 'en_US', name: 'English (American)' },
{ code: 'en_GB', name: 'English (British)' },
{ code: 'en_CA', name: 'English (Canadian)' },
{ code: 'af', name: 'Afrikaans' },
{ code: 'ar', name: 'Arabic' },
{ code: 'gl', name: 'Galician' },
{ code: 'eu', name: 'Basque' },
{ code: 'br', name: 'Breton' },
{ code: 'bg', name: 'Bulgarian' },
{ code: 'ca', name: 'Catalan' },
{ code: 'hr', name: 'Croatian' },
{ code: 'cs', name: 'Czech' },
{ code: 'da', name: 'Danish' },
{ code: 'nl', name: 'Dutch' },
{ code: 'eo', name: 'Esperanto' },
{ code: 'et', name: 'Estonian' },
{ code: 'fo', name: 'Faroese' },
{ code: 'fr', name: 'French' },
{ code: 'de', name: 'German' },
{ code: 'el', name: 'Greek' },
{ code: 'id', name: 'Indonesian' },
{ code: 'ga', name: 'Irish' },
{ code: 'it', name: 'Italian' },
{ code: 'kk', name: 'Kazakh' },
{ code: 'ku', name: 'Kurdish' },
{ code: 'lv', name: 'Latvian' },
{ code: 'lt', name: 'Lithuanian' },
{ code: 'nr', name: 'Ndebele' },
{ code: 'ns', name: 'Northern Sotho' },
{ code: 'no', name: 'Norwegian' },
{ code: 'fa', name: 'Persian' },
{ code: 'pl', name: 'Polish' },
{ code: 'pt_BR', name: 'Portuguese (Brazilian)' },
{ code: 'pt_PT', name: 'Portuguese (European)' },
{ code: 'pa', name: 'Punjabi' },
{ code: 'ro', name: 'Romanian' },
{ code: 'ru', name: 'Russian' },
{ code: 'sk', name: 'Slovak' },
{ code: 'sl', name: 'Slovenian' },
{ code: 'st', name: 'Southern Sotho' },
{ code: 'es', name: 'Spanish' },
{ code: 'sv', name: 'Swedish' },
{ code: 'tl', name: 'Tagalog' },
{ code: 'ts', name: 'Tsonga' },
{ code: 'tn', name: 'Tswana' },
{ code: 'hsb', name: 'Upper Sorbian' },
{ code: 'cy', name: 'Welsh' },
{ code: 'xh', name: 'Xhosa' },
],
translatedLanguages: {
cn: '简体中文',
cs: 'Čeština',
da: 'Dansk',
de: 'Deutsch',
en: 'English',
es: 'Español',
fi: 'Suomi',
fr: 'Français',
it: 'Italiano',
ja: '日本語',
ko: '한국어',
nl: 'Nederlands',
no: 'Norsk',
pl: 'Polski',
pt: 'Português',
ro: 'Română',
ru: 'Русский',
sv: 'Svenska',
tr: 'Türkçe',
uk: 'Українська',
'zh-CN': '简体中文',
},
maxDictionarySize: 1024 * 1024, // 1 MB
// Password Settings
// -----------
// These restrict the passwords users can use when registering
// opts are from http://antelle.github.io/passfield
passwordStrengthOptions: {
length: {
min: 8,
// Bcrypt does not support longer passwords than that.
max: 72,
},
},
elevateAccountSecurityAfterFailedLogin:
parseInt(process.env.ELEVATED_ACCOUNT_SECURITY_AFTER_FAILED_LOGIN_MS, 10) ||
24 * 60 * 60 * 1000,
deviceHistory: {
cookieName: process.env.DEVICE_HISTORY_COOKIE_NAME || 'deviceHistory',
entryExpiry:
parseInt(process.env.DEVICE_HISTORY_ENTRY_EXPIRY_MS, 10) ||
90 * 24 * 60 * 60 * 1000,
maxEntries: parseInt(process.env.DEVICE_HISTORY_MAX_ENTRIES, 10) || 10,
secret: process.env.DEVICE_HISTORY_SECRET,
},
// Email support
// -------------
//
// Overleaf uses nodemailer (http://www.nodemailer.com/) to send transactional emails.
// To see the range of transport and options they support, see http://www.nodemailer.com/docs/transports
// email:
// fromAddress: ""
// replyTo: ""
// lifecycle: false
// # Example transport and parameter settings for Amazon SES
// transport: "SES"
// parameters:
// AWSAccessKeyID: ""
// AWSSecretKey: ""
// For legacy reasons, we need to populate this object.
sentry: {},
// Production Settings
// -------------------
debugPugTemplates: process.env.DEBUG_PUG_TEMPLATES === 'true',
precompilePugTemplatesAtBootTime: process.env
.PRECOMPILE_PUG_TEMPLATES_AT_BOOT_TIME
? process.env.PRECOMPILE_PUG_TEMPLATES_AT_BOOT_TIME === 'true'
: process.env.NODE_ENV === 'production',
// Should javascript assets be served minified or not.
useMinifiedJs: process.env.MINIFIED_JS === 'true' || false,
// Should static assets be sent with a header to tell the browser to cache
// them.
cacheStaticAssets: false,
// If you are running Overleaf over https, set this to true to send the
// cookie with a secure flag (recommended).
secureCookie: false,
// 'SameSite' cookie setting. Can be set to 'lax', 'none' or 'strict'
// 'lax' is recommended, as 'strict' will prevent people linking to projects
// https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-4.1.2.7
sameSiteCookie: 'lax',
// If you are running Overleaf behind a proxy (like Apache, Nginx, etc)
// then set this to true to allow it to correctly detect the forwarded IP
// address and http/https protocol information.
behindProxy: false,
// Delay before closing the http server upon receiving a SIGTERM process signal.
gracefulShutdownDelayInMs:
parseInt(process.env.GRACEFUL_SHUTDOWN_DELAY_SECONDS ?? '5', 10) * seconds,
// Expose the hostname in the `X-Served-By` response header
exposeHostname: process.env.EXPOSE_HOSTNAME === 'true',
// Cookie max age (in milliseconds). Set to false for a browser session.
cookieSessionLength: 5 * 24 * 60 * 60 * 1000, // 5 days
// When true, only allow invites to be sent to email addresses that
// already have user accounts
restrictInvitesToExistingAccounts: false,
// Should we allow access to any page without logging in? This includes
// public projects, /learn, /templates, about pages, etc.
allowPublicAccess: process.env.OVERLEAF_ALLOW_PUBLIC_ACCESS === 'true',
// editor should be open by default
editorIsOpen: process.env.EDITOR_OPEN !== 'false',
// site should be open by default
siteIsOpen: process.env.SITE_OPEN !== 'false',
// status file for closing/opening the site at run-time, polled every 5s
siteMaintenanceFile: process.env.SITE_MAINTENANCE_FILE,
// Use a single compile directory for all users in a project
// (otherwise each user has their own directory)
// disablePerUserCompiles: true
// Domain the client (pdfjs) should download the compiled pdf from
pdfDownloadDomain: process.env.COMPILES_USER_CONTENT_DOMAIN, // "http://clsi-lb:3014"
// By default turn on feature flag, can be overridden per request.
enablePdfCaching: process.env.ENABLE_PDF_CACHING === 'true',
// Maximum size of text documents in the real-time editing system.
max_doc_length: 2 * 1024 * 1024, // 2mb
primary_email_check_expiration: 1000 * 60 * 60 * 24 * 90, // 90 days
// Maximum JSON size in HTTP requests
// We should be able to process twice the max doc length, to allow for
// - the doc content
// - text ranges spanning the whole doc
//
// There's also overhead required for the JSON encoding and the UTF-8 encoding,
// theoretically up to 3 times the max doc length. On the other hand, we don't
// want to block the event loop with JSON parsing, so we try to find a
// practical compromise.
max_json_request_size:
parseInt(process.env.MAX_JSON_REQUEST_SIZE) || 6 * 1024 * 1024, // 6 MB
// Internal configs
// ----------------
path: {
// If we ever need to write something to disk (e.g. incoming requests
// that need processing but may be too big for memory, then write
// them to disk here).
dumpFolder: Path.resolve(__dirname, '../data/dumpFolder'),
uploadFolder: Path.resolve(__dirname, '../data/uploads'),
},
// Automatic Snapshots
// -------------------
automaticSnapshots: {
// How long should we wait after the user last edited to
// take a snapshot?
waitTimeAfterLastEdit: 5 * minutes,
// Even if edits are still taking place, this is maximum
// time to wait before taking another snapshot.
maxTimeBetweenSnapshots: 30 * minutes,
},
// Smoke test
// ----------
// Provide log in credentials and a project to be able to run
// some basic smoke tests to check the core functionality.
//
smokeTest: {
user: process.env.SMOKE_TEST_USER,
userId: process.env.SMOKE_TEST_USER_ID,
password: process.env.SMOKE_TEST_PASSWORD,
projectId: process.env.SMOKE_TEST_PROJECT_ID,
rateLimitSubject: process.env.SMOKE_TEST_RATE_LIMIT_SUBJECT || '127.0.0.1',
stepTimeout: parseInt(process.env.SMOKE_TEST_STEP_TIMEOUT || '10000', 10),
},
appName: process.env.APP_NAME || 'Overleaf (Community Edition)',
adminEmail: process.env.ADMIN_EMAIL || 'placeholder@example.com',
adminDomains: process.env.ADMIN_DOMAINS
? JSON.parse(process.env.ADMIN_DOMAINS)
: undefined,
nav: {
title: process.env.APP_NAME || 'Overleaf Community Edition',
hide_powered_by: process.env.NAV_HIDE_POWERED_BY === 'true',
left_footer: [],
right_footer: [
{
text: "<i class='fa fa-github-square'></i> Fork on GitHub!",
url: 'https://github.com/overleaf/overleaf',
},
],
showSubscriptionLink: false,
header_extras: [],
},
// Example:
// header_extras: [{text: "Some Page", url: "http://example.com/some/page", class: "subdued"}]
recaptcha: {
endpoint:
process.env.RECAPTCHA_ENDPOINT ||
'https://www.google.com/recaptcha/api/siteverify',
trustedUsers: (process.env.CAPTCHA_TRUSTED_USERS || '')
.split(',')
.map(x => x.trim())
.filter(x => x !== ''),
disabled: {
invite: true,
login: true,
passwordReset: true,
register: true,
addEmail: true,
},
},
customisation: {},
redirects: {
'/templates/index': '/templates/',
},
reloadModuleViewsOnEachRequest: process.env.NODE_ENV === 'development',
rateLimit: {
autoCompile: {
everyone: process.env.RATE_LIMIT_AUTO_COMPILE_EVERYONE || 100,
standard: process.env.RATE_LIMIT_AUTO_COMPILE_STANDARD || 25,
},
},
analytics: {
enabled: false,
},
compileBodySizeLimitMb: process.env.COMPILE_BODY_SIZE_LIMIT_MB || 7,
textExtensions: defaultTextExtensions.concat(
parseTextExtensions(process.env.ADDITIONAL_TEXT_EXTENSIONS)
),
// case-insensitive file names that is editable (doc) in the editor
editableFilenames: ['latexmkrc', '.latexmkrc', 'makefile', 'gnumakefile'],
fileIgnorePattern:
process.env.FILE_IGNORE_PATTERN ||
'**/{{__MACOSX,.git,.texpadtmp,.R}{,/**},.!(latexmkrc),*.{dvi,aux,log,toc,out,pdfsync,synctex,synctex(busy),fdb_latexmk,fls,nlo,ind,glo,gls,glg,bbl,blg,doc,docx,gz,swp}}',
validRootDocExtensions: ['tex', 'Rtex', 'ltx', 'Rnw'],
emailConfirmationDisabled:
process.env.EMAIL_CONFIRMATION_DISABLED === 'true' || false,
emailAddressLimit: intFromEnv('EMAIL_ADDRESS_LIMIT', 10),
enabledServices: (process.env.ENABLED_SERVICES || 'web,api')
.split(',')
.map(s => s.trim()),
// module options
// ----------
modules: {
sanitize: {
options: {
allowedTags: [
'h1',
'h2',
'h3',
'h4',
'h5',
'h6',
'blockquote',
'p',
'a',
'ul',
'ol',
'nl',
'li',
'b',
'i',
'strong',
'em',
'strike',
'code',
'hr',
'br',
'div',
'table',
'thead',
'col',
'caption',
'tbody',
'tr',
'th',
'td',
'tfoot',
'pre',
'iframe',
'img',
'figure',
'figcaption',
'span',
'source',
'video',
'del',
],
allowedAttributes: {
a: [
'href',
'name',
'target',
'class',
'event-tracking',
'event-tracking-ga',
'event-tracking-label',
'event-tracking-trigger',
],
div: ['class', 'id', 'style'],
h1: ['class', 'id'],
h2: ['class', 'id'],
h3: ['class', 'id'],
h4: ['class', 'id'],
h5: ['class', 'id'],
h6: ['class', 'id'],
col: ['width'],
figure: ['class', 'id', 'style'],
figcaption: ['class', 'id', 'style'],
i: ['aria-hidden', 'aria-label', 'class', 'id'],
iframe: [
'allowfullscreen',
'frameborder',
'height',
'src',
'style',
'width',
],
img: ['alt', 'class', 'src', 'style'],
source: ['src', 'type'],
span: ['class', 'id', 'style'],
strong: ['style'],
table: ['border', 'class', 'id', 'style'],
td: ['colspan', 'rowspan', 'headers', 'style'],
th: [
'abbr',
'headers',
'colspan',
'rowspan',
'scope',
'sorted',
'style',
],
tr: ['class'],
video: ['alt', 'class', 'controls', 'height', 'width'],
},
},
},
},
overleafModuleImports: {
// modules to import (an empty array for each set of modules)
//
// Restart webpack after making changes.
//
createFileModes: [],
devToolbar: [],
gitBridge: [],
publishModal: [],
tprFileViewInfo: [],
tprFileViewRefreshError: [],
tprFileViewRefreshButton: [],
tprFileViewNotOriginalImporter: [],
contactUsModal: [],
editorToolbarButtons: [],
sourceEditorExtensions: [],
sourceEditorComponents: [],
sourceEditorCompletionSources: [],
sourceEditorSymbolPalette: [],
sourceEditorToolbarComponents: [],
writefullEditorPromotion: [],
langFeedbackLinkingWidgets: [],
integrationLinkingWidgets: [],
referenceLinkingWidgets: [],
importProjectFromGithubModalWrapper: [],
importProjectFromGithubMenu: [],
editorLeftMenuSync: [],
editorLeftMenuManageTemplate: [],
oauth2Server: [],
managedGroupSubscriptionEnrollmentNotification: [],
managedGroupEnrollmentInvite: [],
ssoCertificateInfo: [],
// See comment at the definition of these variables.
entryPointsIde,
entryPointsMain,
},
moduleImportSequence: [
'history-v1',
'launchpad',
'server-ce-scripts',
'user-activate',
'track-changes'
],
csp: {
enabled: process.env.CSP_ENABLED === 'true',
reportOnly: process.env.CSP_REPORT_ONLY === 'true',
reportPercentage: parseFloat(process.env.CSP_REPORT_PERCENTAGE) || 0,
reportUri: process.env.CSP_REPORT_URI,
exclude: ['app/views/project/editor'],
},
unsupportedBrowsers: {
ie: '<=11',
safari: '<=13',
},
// ID of the IEEE brand in the rails app
ieeeBrandId: intFromEnv('IEEE_BRAND_ID', 15),
managedUsers: {
enabled: false,
},
}
// Produce the effective settings object by combining site-specific
// `overrides` with the defaults defined above.
// NOTE(review): argument order matters here — presumably the `merge`
// helper used by this file treats the FIRST argument's values as taking
// precedence (i.e. overrides win over these defaults); confirm against
// the `merge` implementation required at the top of this file before
// relying on that.
module.exports.mergeWith = function (overrides) {
  return merge(overrides, module.exports)
}

View file

@ -0,0 +1,287 @@
const ChatApiHandler = require('../../../../app/src/Features/Chat/ChatApiHandler')
const ChatManager = require('../../../../app/src/Features/Chat/ChatManager')
const EditorRealTimeController = require('../../../../app/src/Features/Editor/EditorRealTimeController')
const SessionManager = require('../../../../app/src/Features/Authentication/SessionManager')
const UserInfoManager = require('../../../../app/src/Features/User/UserInfoManager')
const DocstoreManager = require('../../../../app/src/Features/Docstore/DocstoreManager')
const DocumentUpdaterHandler = require('../../../../app/src/Features/DocumentUpdater/DocumentUpdaterHandler')
const CollaboratorsGetter = require('../../../../app/src/Features/Collaborators/CollaboratorsGetter')
const { Project } = require('../../../../app/src/models/Project')
const pLimit = require('p-limit')
/**
 * Persist the track-changes state on a project document.
 *
 * @param {string} projectId - id of the project to update
 * @param {boolean|Object} state - `true`/`false` for everyone, or a
 *   per-user map of flags (possibly including `__guests__`)
 * @param {Function} callback - called with `(err)` once the update settles
 */
async function _updateTCState(projectId, state, callback) {
  try {
    await Project.updateOne({ _id: projectId }, { track_changes: state }).exec()
    callback()
  } catch (err) {
    // Without this catch a rejected update would escape as an unhandled
    // promise rejection and the callback (and HTTP response) would never
    // fire, leaving the request hanging.
    callback(err)
  }
}
/**
 * Rename a document's mongo `_id` field to `id` in place.
 * The frontend expects `id`; mongo documents carry `_id`.
 *
 * @param {Object} doc - object to rewrite (mutated)
 * @returns {Object} the same object, for use in `.map()` chains
 */
function _transformId(doc) {
  const { _id } = doc
  // Falsy ids (missing/null) are left untouched, matching the original
  // truthiness check.
  if (_id) {
    doc.id = _id
    delete doc._id
  }
  return doc
}
/**
 * HTTP controller for the track-changes module: toggling change tracking,
 * accepting changes, listing ranges/users, and managing comment threads.
 * All handlers respond 204 on success and delegate errors to `next`.
 */
const TrackChangesController = {
  /**
   * Toggle track-changes for a project and broadcast the new state.
   * `req.body.on` applies to everyone, `req.body.on_for` is a per-user map,
   * and `req.body.on_for_guests` additionally enables anonymous users.
   */
  trackChanges(req, res, next) {
    const { project_id } = req.params
    const state = req.body.on || req.body.on_for
    // Only a per-user map can carry the guest flag; guard so a missing or
    // boolean state does not throw (or silently drop the assignment).
    if (
      req.body.on_for_guests &&
      !req.body.on &&
      typeof state === 'object' &&
      state !== null
    ) {
      state.__guests__ = true
    }
    return _updateTCState(project_id, state, function (err) {
      if (err != null) {
        return next(err)
      }
      EditorRealTimeController.emitToRoom(
        project_id,
        'toggle-track-changes',
        state
      )
      return res.sendStatus(204)
    })
  },
  /**
   * Accept a set of tracked changes in one document and notify the room.
   */
  acceptChanges(req, res, next) {
    const { project_id, doc_id } = req.params
    const change_ids = req.body.change_ids
    return DocumentUpdaterHandler.acceptChanges(
      project_id,
      doc_id,
      change_ids,
      function (err) {
        if (err != null) {
          return next(err)
        }
        EditorRealTimeController.emitToRoom(
          project_id,
          'accept-changes',
          doc_id,
          change_ids
        )
        return res.sendStatus(204)
      }
    )
  },
  /**
   * Return all tracked-change ranges of the project, keyed for the frontend.
   */
  async getAllRanges(req, res, next) {
    const { project_id } = req.params
    try {
      // FIXME: ranges are from mongodb, probably already outdated
      const ranges = await DocstoreManager.promises.getAllRanges(project_id)
      // frontend expects 'id', not '_id'
      return res.json(ranges.map(_transformId))
    } catch (err) {
      // Express 4 does not forward rejected promises from async handlers;
      // route the error to the error middleware explicitly.
      return next(err)
    }
  },
  /**
   * Return personal info for all project members (change authors).
   */
  async getChangesUsers(req, res, next) {
    const { project_id } = req.params
    try {
      const memberIds =
        await CollaboratorsGetter.promises.getMemberIds(project_id)
      // FIXME: Does not work properly if the user is no longer a member of the project
      // memberIds from DocstoreManager.getAllRanges(project_id) is not a remedy
      // because ranges are not updated in real-time
      const limit = pLimit(3) // cap concurrent user-info lookups
      const users = await Promise.all(
        memberIds.map(memberId =>
          limit(() => UserInfoManager.promises.getPersonalInfo(memberId))
        )
      )
      users.push({ _id: null }) // An anonymous user won't cause any harm
      // frontend expects 'id', not '_id'
      return res.json(users.map(_transformId))
    } catch (err) {
      // See getAllRanges: async handler errors must be forwarded manually.
      return next(err)
    }
  },
  /**
   * Return all comment threads with user info injected into each message.
   */
  getThreads(req, res, next) {
    const { project_id } = req.params
    return ChatApiHandler.getThreads(project_id, function (err, messages) {
      if (err != null) {
        return next(err)
      }
      return ChatManager.injectUserInfoIntoThreads(messages, function (err) {
        if (err != null) {
          return next(err)
        }
        return res.json(messages)
      })
    })
  },
  /**
   * Post a comment to a thread as the logged-in user and broadcast it.
   */
  sendComment(req, res, next) {
    const { project_id, thread_id } = req.params
    const { content } = req.body
    const user_id = SessionManager.getLoggedInUserId(req.session)
    if (user_id == null) {
      const err = new Error('no logged-in user')
      return next(err)
    }
    return ChatApiHandler.sendComment(
      project_id,
      thread_id,
      user_id,
      content,
      function (err, message) {
        if (err != null) {
          return next(err)
        }
        // Attach the author's info so clients can render the comment
        // without an extra lookup.
        return UserInfoManager.getPersonalInfo(user_id, function (err, user) {
          if (err != null) {
            return next(err)
          }
          message.user = user
          EditorRealTimeController.emitToRoom(
            project_id,
            'new-comment',
            thread_id,
            message
          )
          return res.sendStatus(204)
        })
      }
    )
  },
  /**
   * Edit one of the logged-in user's messages and broadcast the new content.
   */
  editMessage(req, res, next) {
    const { project_id, thread_id, message_id } = req.params
    const { content } = req.body
    const user_id = SessionManager.getLoggedInUserId(req.session)
    if (user_id == null) {
      const err = new Error('no logged-in user')
      return next(err)
    }
    return ChatApiHandler.editMessage(
      project_id,
      thread_id,
      message_id,
      user_id,
      content,
      function (err) {
        if (err != null) {
          return next(err)
        }
        EditorRealTimeController.emitToRoom(
          project_id,
          'edit-message',
          thread_id,
          message_id,
          content
        )
        return res.sendStatus(204)
      }
    )
  },
  /**
   * Delete a single message from a thread and broadcast the removal.
   */
  deleteMessage(req, res, next) {
    const { project_id, thread_id, message_id } = req.params
    return ChatApiHandler.deleteMessage(
      project_id,
      thread_id,
      message_id,
      function (err) {
        if (err != null) {
          return next(err)
        }
        EditorRealTimeController.emitToRoom(
          project_id,
          'delete-message',
          thread_id,
          message_id
        )
        return res.sendStatus(204)
      }
    )
  },
  /**
   * Mark a thread resolved by the logged-in user and broadcast who did it.
   */
  resolveThread(req, res, next) {
    const { project_id, thread_id } = req.params
    const user_id = SessionManager.getLoggedInUserId(req.session)
    if (user_id == null) {
      const err = new Error('no logged-in user')
      return next(err)
    }
    return ChatApiHandler.resolveThread(
      project_id,
      thread_id,
      user_id,
      function (err) {
        if (err != null) {
          return next(err)
        }
        return UserInfoManager.getPersonalInfo(user_id, function (err, user) {
          if (err != null) {
            return next(err)
          }
          EditorRealTimeController.emitToRoom(
            project_id,
            'resolve-thread',
            thread_id,
            user
          )
          return res.sendStatus(204)
        })
      }
    )
  },
  /**
   * Reopen a resolved thread and broadcast the change.
   */
  reopenThread(req, res, next) {
    const { project_id, thread_id } = req.params
    const user_id = SessionManager.getLoggedInUserId(req.session)
    if (user_id == null) {
      const err = new Error('no logged-in user')
      return next(err)
    }
    return ChatApiHandler.reopenThread(
      project_id,
      thread_id,
      function (err) {
        if (err != null) {
          return next(err)
        }
        EditorRealTimeController.emitToRoom(
          project_id,
          'reopen-thread',
          thread_id
        )
        return res.sendStatus(204)
      }
    )
  },
  /**
   * Delete a whole comment thread: first remove the comment range from the
   * live document, then delete the thread's messages, then broadcast.
   */
  deleteThread(req, res, next) {
    const { project_id, doc_id, thread_id } = req.params
    const user_id = SessionManager.getLoggedInUserId(req.session)
    if (user_id == null) {
      const err = new Error('no logged-in user')
      return next(err)
    }
    // The chat-side deletion must wait for the document updater to succeed;
    // running the two in parallel (as before) could both call next(err) AND
    // later send a 204, i.e. respond twice, and could orphan the range.
    return DocumentUpdaterHandler.deleteThread(
      project_id,
      doc_id,
      thread_id,
      user_id,
      function (err) {
        if (err != null) {
          return next(err)
        }
        ChatApiHandler.deleteThread(
          project_id,
          thread_id,
          function (err) {
            if (err != null) {
              return next(err)
            }
            EditorRealTimeController.emitToRoom(
              project_id,
              'delete-thread',
              thread_id
            )
            return res.sendStatus(204)
          }
        )
      }
    )
  },
}
module.exports = TrackChangesController

View file

@ -0,0 +1,72 @@
const logger = require('@overleaf/logger')
const AuthorizationMiddleware = require('../../../../app/src/Features/Authorization/AuthorizationMiddleware')
const TrackChangesController = require('./TrackChangesController')
module.exports = {
apply(webRouter) {
logger.debug({}, 'Init track-changes router')
webRouter.post('/project/:project_id/track_changes',
AuthorizationMiddleware.blockRestrictedUserFromProject,
AuthorizationMiddleware.ensureUserCanReadProject,
TrackChangesController.trackChanges
)
webRouter.post('/project/:project_id/doc/:doc_id/changes/accept',
AuthorizationMiddleware.blockRestrictedUserFromProject,
AuthorizationMiddleware.ensureUserCanReadProject,
TrackChangesController.acceptChanges
)
webRouter.get('/project/:project_id/ranges',
AuthorizationMiddleware.blockRestrictedUserFromProject,
AuthorizationMiddleware.ensureUserCanReadProject,
TrackChangesController.getAllRanges
)
webRouter.get('/project/:project_id/changes/users',
AuthorizationMiddleware.blockRestrictedUserFromProject,
AuthorizationMiddleware.ensureUserCanReadProject,
TrackChangesController.getChangesUsers
)
webRouter.get(
'/project/:project_id/threads',
AuthorizationMiddleware.blockRestrictedUserFromProject,
AuthorizationMiddleware.ensureUserCanReadProject,
TrackChangesController.getThreads
)
webRouter.post(
'/project/:project_id/thread/:thread_id/messages',
AuthorizationMiddleware.blockRestrictedUserFromProject,
AuthorizationMiddleware.ensureUserCanReadProject,
TrackChangesController.sendComment
)
webRouter.post(
'/project/:project_id/thread/:thread_id/messages/:message_id/edit',
AuthorizationMiddleware.blockRestrictedUserFromProject,
AuthorizationMiddleware.ensureUserCanReadProject,
TrackChangesController.editMessage
)
webRouter.delete(
'/project/:project_id/thread/:thread_id/messages/:message_id',
AuthorizationMiddleware.blockRestrictedUserFromProject,
AuthorizationMiddleware.ensureUserCanReadProject,
TrackChangesController.deleteMessage
)
webRouter.post(
'/project/:project_id/thread/:thread_id/resolve',
AuthorizationMiddleware.blockRestrictedUserFromProject,
AuthorizationMiddleware.ensureUserCanReadProject,
TrackChangesController.resolveThread
)
webRouter.post(
'/project/:project_id/thread/:thread_id/reopen',
AuthorizationMiddleware.blockRestrictedUserFromProject,
AuthorizationMiddleware.ensureUserCanReadProject,
TrackChangesController.reopenThread
)
webRouter.delete(
'/project/:project_id/doc/:doc_id/thread/:thread_id',
AuthorizationMiddleware.blockRestrictedUserFromProject,
AuthorizationMiddleware.ensureUserCanReadProject,
TrackChangesController.deleteThread
)
},
}

View file

@ -0,0 +1,2 @@
const TrackChangesRouter = require('./app/src/TrackChangesRouter')
// Module entry point: Overleaf's module loader reads the exported `router`
// and invokes `router.apply(webRouter)` during web-service startup.
module.exports = { router : TrackChangesRouter }