diff --git a/README.md b/README.md index 524a831262..33e7a8956b 100644 --- a/README.md +++ b/README.md @@ -14,44 +14,52 @@ License

-A screenshot of a project being edited in Overleaf Community Edition +A screenshot of a project being edited in Overleaf Extended Community Edition

- Figure 1: A screenshot of a project being edited in Overleaf Community Edition. + Figure 1: A screenshot of a project being edited in Overleaf Extended Community Edition.

## Community Edition -[Overleaf](https://www.overleaf.com) is an open-source online real-time collaborative LaTeX editor. We run a hosted version at [www.overleaf.com](https://www.overleaf.com), but you can also run your own local version, and contribute to the development of Overleaf. +[Overleaf](https://www.overleaf.com) is an open-source online real-time collaborative LaTeX editor. Overleaf runs a hosted version at [www.overleaf.com](https://www.overleaf.com), but you can also run your own local version, and contribute to the development of Overleaf. + +## Extended Community Edition + +This extended version of Overleaf CE includes: + +- Template Gallery +- Sandboxed Compiles with TeX Live image selection +- LDAP authentication +- SAML authentication +- OpenID Connect authentication +- Real-time track changes and comments +- Autocomplete of reference keys +- Symbol Palette +- "From External URL" feature > [!CAUTION] -> Overleaf Community Edition is intended for use in environments where **all** users are trusted. Community Edition is **not** appropriate for scenarios where isolation of users is required due to Sandbox Compiles not being available. When not using Sandboxed Compiles, users have full read and write access to the `sharelatex` container resources (filesystem, network, environment variables) when running LaTeX compiles. +> Overleaf Community Edition is intended for use in environments where **all** users are trusted. Community Edition is **not** appropriate for scenarios where isolation of users is required, because Sandboxed Compiles are not available in it. When not using Sandboxed Compiles, users have full read and write access to the `sharelatex` container resources (filesystem, network, environment variables) when running LaTeX compiles. +Therefore, in any environment where not all users can be fully trusted, it is strongly recommended to enable the Sandboxed Compiles feature available in the Extended Community Edition. -For more information on Sandbox Compiles check out our [documentation](https://docs.overleaf.com/on-premises/configuration/overleaf-toolkit/server-pro-only-configuration/sandboxed-compiles). +For more information on Sandboxed Compiles, check out the Overleaf [documentation](https://docs.overleaf.com/on-premises/configuration/overleaf-toolkit/server-pro-only-configuration/sandboxed-compiles). ## Enterprise -If you want help installing and maintaining Overleaf in your lab or workplace, we offer an officially supported version called [Overleaf Server Pro](https://www.overleaf.com/for/enterprises). It also includes more features for security (SSO with LDAP or SAML), administration and collaboration (e.g. tracked changes). [Find out more!](https://www.overleaf.com/for/enterprises) - -## Keeping up to date - -Sign up to the [mailing list](https://mailchi.mp/overleaf.com/community-edition-and-server-pro) to get updates on Overleaf releases and development. +If you want help installing and maintaining Overleaf in your lab or workplace, Overleaf offers an officially supported version called [Overleaf Server Pro](https://www.overleaf.com/for/enterprises). ## Installation -We have detailed installation instructions in the [Overleaf Toolkit](https://github.com/overleaf/toolkit/). - -## Upgrading - -If you are upgrading from a previous version of Overleaf, please see the [Release Notes section on the Wiki](https://github.com/overleaf/overleaf/wiki#release-notes) for all of the versions between your current version and the version you are upgrading to.
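As a rough illustration of the Sandboxed Compiles recommendation above, the sketch below shows the kind of environment settings involved. The variable names come from the Sandboxed Compiles documentation and from the `allowedVars` list later in this patch; the image tags and the file they are placed in (for example `config/variables.env` in the Overleaf Toolkit) are assumptions for illustration, not part of this patch.

```
# Hypothetical Overleaf Toolkit settings for Sandboxed Compiles (illustrative values only)
SANDBOXED_COMPILES=true
# TeX Live images offered in the editor's image selector; tags are examples
TEX_LIVE_DOCKER_IMAGE=texlive/texlive:TL2024-historic
ALL_TEX_LIVE_DOCKER_IMAGES=texlive/texlive:TL2024-historic,texlive/texlive:TL2023-historic
ALL_TEX_LIVE_DOCKER_IMAGE_NAMES=TeX Live 2024,TeX Live 2023
```

The `:TL<year>` tag format used in these example values matches the regular-expression change to `services/clsi/app/js/DockerRunner.js` further down in this patch.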
+Detailed installation instructions can be found in the [Overleaf Toolkit](https://github.com/overleaf/toolkit/). +Configuration details and release history for the Extended Community Edition can be found on the [Extended CE Wiki Page](https://github.com/yu-i-i/overleaf-cep/wiki). ## Overleaf Docker Image This repo contains two dockerfiles, [`Dockerfile-base`](server-ce/Dockerfile-base), which builds the -`sharelatex/sharelatex-base` image, and [`Dockerfile`](server-ce/Dockerfile) which builds the -`sharelatex/sharelatex` (or "community") image. +`sharelatex/sharelatex-base:ext-ce` image, and [`Dockerfile`](server-ce/Dockerfile) which builds the +`sharelatex/sharelatex:ext-ce` image. The Base image generally contains the basic dependencies like `wget`, plus `texlive`. -We split this out because it's a pretty heavy set of +This is split out because it's a pretty heavy set of dependencies, and it's nice to not have to rebuild all of that every time. The `sharelatex/sharelatex` image extends the base image and adds the actual Overleaf code @@ -59,20 +67,16 @@ and services. Use `make build-base` and `make build-community` from `server-ce/` to build these images. -We use the [Phusion base-image](https://github.com/phusion/baseimage-docker) -(which is extended by our `base` image) to provide us with a VM-like container +The [Phusion base-image](https://github.com/phusion/baseimage-docker) +(which is extended by the `base` image) provides a VM-like container in which to run the Overleaf services. Baseimage uses the `runit` service -manager to manage services, and we add our init-scripts from the `server-ce/runit` -folder. - - -## Contributing - -Please see the [CONTRIBUTING](CONTRIBUTING.md) file for information on contributing to the development of Overleaf. +manager to manage services, and init scripts from the `server-ce/runit` +folder are added. 
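For orientation, here is a minimal sketch of building both images with the make targets mentioned above, assuming the commands are run from a checkout of this repository. The `:ext-ce` tags follow the image names stated in this README, and the `--network=host` flag added to `server-ce/Makefile` in this patch is applied by the Makefile itself.

```bash
# Build the heavy base image first (system packages plus TeX Live), then the app image.
cd server-ce
make build-base        # expected to tag sharelatex/sharelatex-base:ext-ce
make build-community   # expected to tag sharelatex/sharelatex:ext-ce
```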
## Authors -[The Overleaf Team](https://www.overleaf.com/about) +[The Overleaf Team](https://www.overleaf.com/about) +[yu-i-i](https://github.com/yu-i-i/overleaf-cep) — Extensions for CE unless otherwise noted ## License diff --git a/develop/README.md b/develop/README.md index 8d45383c23..8e3f89862c 100644 --- a/develop/README.md +++ b/develop/README.md @@ -77,6 +77,7 @@ each service: | `filestore` | 9235 | | `notifications` | 9236 | | `real-time` | 9237 | +| `references` | 9238 | | `history-v1` | 9239 | | `project-history` | 9240 | diff --git a/develop/dev.env b/develop/dev.env index 6ebbbb1ffd..b003e4e0eb 100644 --- a/develop/dev.env +++ b/develop/dev.env @@ -15,6 +15,7 @@ PROJECT_HISTORY_HOST=project-history QUEUES_REDIS_HOST=redis REALTIME_HOST=real-time REDIS_HOST=redis +REFERENCES_HOST=references SESSION_SECRET=foo V1_HISTORY_HOST=history-v1 WEBPACK_HOST=webpack diff --git a/develop/docker-compose.dev.yml b/develop/docker-compose.dev.yml index 3d2fca7e0b..65e15ef07b 100644 --- a/develop/docker-compose.dev.yml +++ b/develop/docker-compose.dev.yml @@ -112,6 +112,17 @@ services: - ../services/real-time/app.js:/overleaf/services/real-time/app.js - ../services/real-time/config:/overleaf/services/real-time/config + references: + command: ["node", "--watch", "app.js"] + environment: + - NODE_OPTIONS=--inspect=0.0.0.0:9229 + ports: + - "127.0.0.1:9238:9229" + volumes: + - ../services/references/app:/overleaf/services/references/app + - ../services/references/config:/overleaf/services/references/config + - ../services/references/app.js:/overleaf/services/references/app.js + web: command: ["node", "--watch", "app.mjs", "--watch-locales"] environment: diff --git a/develop/docker-compose.yml b/develop/docker-compose.yml index 7161e0686a..e5b84c38b3 100644 --- a/develop/docker-compose.yml +++ b/develop/docker-compose.yml @@ -123,7 +123,7 @@ services: dockerfile: services/real-time/Dockerfile env_file: - dev.env - + redis: image: redis:5 ports: @@ -131,6 +131,13 @@ services: volumes: - redis-data:/data + references: + build: + context: .. + dockerfile: services/references/Dockerfile + env_file: + - dev.env + web: build: context: .. 
@@ -140,7 +147,7 @@ services: - dev.env environment: - APP_NAME=Overleaf Community Edition - - ENABLED_LINKED_FILE_TYPES=project_file,project_output_file + - ENABLED_LINKED_FILE_TYPES=project_file,project_output_file,url - EMAIL_CONFIRMATION_DISABLED=true - NODE_ENV=development - OVERLEAF_ALLOW_PUBLIC_ACCESS=true @@ -161,6 +168,7 @@ services: - notifications - project-history - real-time + - references webpack: build: diff --git a/doc/screenshot.png b/doc/screenshot.png index 1c1f339630..92633192a5 100644 Binary files a/doc/screenshot.png and b/doc/screenshot.png differ diff --git a/docker-compose.yml b/docker-compose.yml index e257716789..962adfb5d8 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -32,7 +32,7 @@ services: OVERLEAF_REDIS_HOST: redis REDIS_HOST: redis - ENABLED_LINKED_FILE_TYPES: 'project_file,project_output_file' + ENABLED_LINKED_FILE_TYPES: 'project_file,project_output_file,url' # Enables Thumbnail generation using ImageMagick ENABLE_CONVERSIONS: 'true' diff --git a/package-lock.json b/package-lock.json index 2b3a5868a2..d9d8285618 100644 --- a/package-lock.json +++ b/package-lock.json @@ -35581,6 +35581,7 @@ "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", "deprecated": "request has been deprecated, see https://github.com/request/request/issues/3142", + "license": "Apache-2.0", "dependencies": { "aws-sign2": "~0.7.0", "aws4": "^1.8.0", @@ -35638,15 +35639,15 @@ } }, "node_modules/request/node_modules/tough-cookie": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", - "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-5.1.2.tgz", + "integrity": "sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A==", + "license": "BSD-3-Clause", "dependencies": { - "psl": "^1.1.28", - "punycode": "^2.1.1" + "tldts": "^6.1.32" }, "engines": { - "node": ">=0.8" + "node": ">=16" } }, "node_modules/requestretry": { @@ -39612,6 +39613,24 @@ "tlds": "bin.js" } }, + "node_modules/tldts": { + "version": "6.1.86", + "resolved": "https://registry.npmjs.org/tldts/-/tldts-6.1.86.tgz", + "integrity": "sha512-WMi/OQ2axVTf/ykqCQgXiIct+mSQDFdH2fkwhPwgEwvJ1kSzZRiinb0zF2Xb8u4+OqPChmyI6MEu4EezNJz+FQ==", + "license": "MIT", + "dependencies": { + "tldts-core": "^6.1.86" + }, + "bin": { + "tldts": "bin/cli.js" + } + }, + "node_modules/tldts-core": { + "version": "6.1.86", + "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-6.1.86.tgz", + "integrity": "sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA==", + "license": "MIT" + }, "node_modules/tmp": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz", diff --git a/package.json b/package.json index 388b750c3d..44fffc4664 100644 --- a/package.json +++ b/package.json @@ -33,6 +33,9 @@ "path-to-regexp": "3.3.0", "body-parser": "1.20.3", "multer": "2.0.1" + }, + "request@2.88.2": { + "tough-cookie": "5.1.2" } }, "scripts": { diff --git a/patches/@node-saml+node-saml+4.0.5.patch b/patches/@node-saml+node-saml+4.0.5.patch new file mode 100644 index 0000000000..81fd700b31 --- /dev/null +++ b/patches/@node-saml+node-saml+4.0.5.patch @@ -0,0 +1,23 @@ +diff --git 
a/node_modules/@node-saml/node-saml/lib/saml.js b/node_modules/@node-saml/node-saml/lib/saml.js +index fba15b9..a5778cb 100644 +--- a/node_modules/@node-saml/node-saml/lib/saml.js ++++ b/node_modules/@node-saml/node-saml/lib/saml.js +@@ -336,7 +336,8 @@ class SAML { + const requestOrResponse = request || response; + (0, utility_1.assertRequired)(requestOrResponse, "either request or response is required"); + let buffer; +- if (this.options.skipRequestCompression) { ++ // logout requestOrResponse must be compressed anyway ++ if (this.options.skipRequestCompression && operation !== "logout") { + buffer = Buffer.from(requestOrResponse, "utf8"); + } + else { +@@ -495,7 +496,7 @@ class SAML { + try { + xml = Buffer.from(container.SAMLResponse, "base64").toString("utf8"); + doc = await (0, xml_1.parseDomFromString)(xml); +- const inResponseToNodes = xml_1.xpath.selectAttributes(doc, "/*[local-name()='Response']/@InResponseTo"); ++ const inResponseToNodes = xml_1.xpath.selectAttributes(doc, "/*[local-name()='Response' or local-name()='LogoutResponse']/@InResponseTo"); + if (inResponseToNodes) { + inResponseTo = inResponseToNodes.length ? inResponseToNodes[0].nodeValue : null; + await this.validateInResponseTo(inResponseTo); diff --git a/patches/ldapauth-fork+4.3.3.patch b/patches/ldapauth-fork+4.3.3.patch new file mode 100644 index 0000000000..4d31210c9d --- /dev/null +++ b/patches/ldapauth-fork+4.3.3.patch @@ -0,0 +1,64 @@ +diff --git a/node_modules/ldapauth-fork/lib/ldapauth.js b/node_modules/ldapauth-fork/lib/ldapauth.js +index 85ecf36a8b..a7d07e0f78 100644 +--- a/node_modules/ldapauth-fork/lib/ldapauth.js ++++ b/node_modules/ldapauth-fork/lib/ldapauth.js +@@ -69,6 +69,7 @@ function LdapAuth(opts) { + this.opts.bindProperty || (this.opts.bindProperty = 'dn'); + this.opts.groupSearchScope || (this.opts.groupSearchScope = 'sub'); + this.opts.groupDnProperty || (this.opts.groupDnProperty = 'dn'); ++ this.opts.tlsStarted = false; + + EventEmitter.call(this); + +@@ -108,21 +109,7 @@ function LdapAuth(opts) { + this._userClient.on('error', this._handleError.bind(this)); + + var self = this; +- if (this.opts.starttls) { +- // When starttls is enabled, this callback supplants the 'connect' callback +- this._adminClient.starttls(this.opts.tlsOptions, this._adminClient.controls, function(err) { +- if (err) { +- self._handleError(err); +- } else { +- self._onConnectAdmin(); +- } +- }); +- this._userClient.starttls(this.opts.tlsOptions, this._userClient.controls, function(err) { +- if (err) { +- self._handleError(err); +- } +- }); +- } else if (opts.reconnect) { ++ if (opts.reconnect && !this.opts.starttls) { + this.once('_installReconnectListener', function() { + self.log && self.log.trace('install reconnect listener'); + self._adminClient.on('connect', function() { +@@ -384,6 +371,28 @@ LdapAuth.prototype._findGroups = function(user, callback) { + */ + LdapAuth.prototype.authenticate = function(username, password, callback) { + var self = this; ++ if (this.opts.starttls && !this.opts.tlsStarted) { ++ // When starttls is enabled, this callback supplants the 'connect' callback ++ this._adminClient.starttls(this.opts.tlsOptions, this._adminClient.controls, function (err) { ++ if (err) { ++ self._handleError(err); ++ } else { ++ self._onConnectAdmin(function(){self._handleAuthenticate(username, password, callback);}); ++ } ++ }); ++ this._userClient.starttls(this.opts.tlsOptions, this._userClient.controls, function (err) { ++ if (err) { ++ self._handleError(err); ++ } ++ }); ++ } else { ++ 
self._handleAuthenticate(username, password, callback); ++ } ++}; ++ ++LdapAuth.prototype._handleAuthenticate = function (username, password, callback) { ++ this.opts.tlsStarted = true; ++ var self = this; + + if (typeof password === 'undefined' || password === null || password === '') { + return callback(new Error('no password given')); diff --git a/server-ce/Makefile b/server-ce/Makefile index 853a99d05e..8e2fe7d14f 100644 --- a/server-ce/Makefile +++ b/server-ce/Makefile @@ -24,6 +24,7 @@ build-base: --cache-from $(OVERLEAF_BASE_BRANCH) \ --tag $(OVERLEAF_BASE_TAG) \ --tag $(OVERLEAF_BASE_BRANCH) \ + --network=host \ $(MONOREPO_ROOT) @@ -39,6 +40,7 @@ build-community: --file Dockerfile \ --tag $(OVERLEAF_TAG) \ --tag $(OVERLEAF_BRANCH) \ + --network=host \ $(MONOREPO_ROOT) SHELLCHECK_OPTS = \ diff --git a/server-ce/config/env.sh b/server-ce/config/env.sh index b12ca242d3..81cebe4caa 100644 --- a/server-ce/config/env.sh +++ b/server-ce/config/env.sh @@ -9,5 +9,6 @@ export HISTORY_V1_HOST=127.0.0.1 export NOTIFICATIONS_HOST=127.0.0.1 export PROJECT_HISTORY_HOST=127.0.0.1 export REALTIME_HOST=127.0.0.1 +export REFERENCES_HOST=127.0.0.1 export WEB_HOST=127.0.0.1 export WEB_API_HOST=127.0.0.1 diff --git a/server-ce/runit/references-overleaf/run b/server-ce/runit/references-overleaf/run new file mode 100755 index 0000000000..875023df9f --- /dev/null +++ b/server-ce/runit/references-overleaf/run @@ -0,0 +1,12 @@ +#!/bin/bash + +NODE_PARAMS="" +if [ "$DEBUG_NODE" == "true" ]; then + echo "running debug - references" + NODE_PARAMS="--inspect=0.0.0.0:30560" +fi + +source /etc/overleaf/env.sh +export LISTEN_ADDRESS=127.0.0.1 + +exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/references/app.js >> /var/log/overleaf/references.log 2>&1 diff --git a/server-ce/services.js b/server-ce/services.js index d0b0a9c076..e0282f3bad 100644 --- a/server-ce/services.js +++ b/server-ce/services.js @@ -29,6 +29,9 @@ module.exports = [ { name: 'project-history', }, + { + name: 'references', + }, { name: 'history-v1', }, diff --git a/server-ce/test/Makefile b/server-ce/test/Makefile index 6c56b7e8fe..fb7c980293 100644 --- a/server-ce/test/Makefile +++ b/server-ce/test/Makefile @@ -21,9 +21,11 @@ test-e2e-native: test-e2e: docker compose build host-admin + docker compose up -d host-admin docker compose up --no-log-prefix --exit-code-from=e2e e2e test-e2e-open: + docker compose up -d host-admin docker compose up --no-log-prefix --exit-code-from=e2e-open e2e-open clean: diff --git a/server-ce/test/docker-compose.yml b/server-ce/test/docker-compose.yml index 029b73fc62..d16c5e2b71 100644 --- a/server-ce/test/docker-compose.yml +++ b/server-ce/test/docker-compose.yml @@ -20,7 +20,7 @@ services: OVERLEAF_EMAIL_SMTP_HOST: 'mailtrap' OVERLEAF_EMAIL_SMTP_PORT: '25' OVERLEAF_EMAIL_SMTP_IGNORE_TLS: 'true' - ENABLED_LINKED_FILE_TYPES: 'project_file,project_output_file' + ENABLED_LINKED_FILE_TYPES: 'project_file,project_output_file,url' ENABLE_CONVERSIONS: 'true' EMAIL_CONFIRMATION_DISABLED: 'true' healthcheck: @@ -35,7 +35,7 @@ services: MAILTRAP_PASSWORD: 'password-for-mailtrap' mongo: - image: mongo:6.0 + image: mongo:8.0.11 command: '--replSet overleaf' volumes: - ../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js diff --git a/server-ce/test/editor.spec.ts b/server-ce/test/editor.spec.ts index d0060518de..3e57b94f8f 100644 --- a/server-ce/test/editor.spec.ts +++ b/server-ce/test/editor.spec.ts @@ -2,6 +2,7 @@ import { createNewFile, createProject, 
openProjectById, + testNewFileUpload, } from './helpers/project' import { isExcludedBySharding, startWith } from './helpers/config' import { ensureUserExists, login } from './helpers/login' @@ -119,24 +120,7 @@ describe('editor', () => { cy.get('button').contains('New file').click({ force: true }) }) - it('can upload file', () => { - const name = `${uuid()}.txt` - const content = `Test File Content ${name}` - cy.get('button').contains('Upload').click({ force: true }) - cy.get('input[type=file]') - .first() - .selectFile( - { - contents: Cypress.Buffer.from(content), - fileName: name, - lastModified: Date.now(), - }, - { force: true } - ) - // force: The file-tree pane is too narrow to display the full name. - cy.findByTestId('file-tree').findByText(name).click({ force: true }) - cy.findByText(content) - }) + testNewFileUpload() it('should not display import from URL', () => { cy.findByText('From external URL').should('not.exist') diff --git a/server-ce/test/filestore-migration.spec.ts b/server-ce/test/filestore-migration.spec.ts new file mode 100644 index 0000000000..25875ad374 --- /dev/null +++ b/server-ce/test/filestore-migration.spec.ts @@ -0,0 +1,104 @@ +import { ensureUserExists, login } from './helpers/login' +import { + createProject, + openProjectById, + prepareFileUploadTest, +} from './helpers/project' +import { isExcludedBySharding, startWith } from './helpers/config' +import { prepareWaitForNextCompileSlot } from './helpers/compile' +import { beforeWithReRunOnTestRetry } from './helpers/beforeWithReRunOnTestRetry' +import { v4 as uuid } from 'uuid' +import { purgeFilestoreData, runScript } from './helpers/hostAdminClient' + +describe('filestore migration', function () { + if (isExcludedBySharding('CE_CUSTOM_3')) return + startWith({ withDataDir: true, resetData: true, vars: {} }) + ensureUserExists({ email: 'user@example.com' }) + + let projectName: string + let projectId: string + let waitForCompileRateLimitCoolOff: (fn: () => void) => void + const previousBinaryFiles: (() => void)[] = [] + beforeWithReRunOnTestRetry(function () { + projectName = `project-${uuid()}` + login('user@example.com') + createProject(projectName, { type: 'Example project' }).then( + id => (projectId = id) + ) + let queueReset + ;({ waitForCompileRateLimitCoolOff, queueReset } = + prepareWaitForNextCompileSlot()) + queueReset() + previousBinaryFiles.push(prepareFileUploadTest(true)) + }) + + beforeEach(() => { + login('user@example.com') + waitForCompileRateLimitCoolOff(() => { + openProjectById(projectId) + }) + }) + + function checkFilesAreAccessible() { + it('can upload new binary file and read previous uploads', function () { + previousBinaryFiles.push(prepareFileUploadTest(true)) + for (const check of previousBinaryFiles) { + check() + } + }) + + it('renders frog jpg', () => { + cy.findByTestId('file-tree').findByText('frog.jpg').click() + cy.get('[alt="frog.jpg"]') + .should('be.visible') + .and('have.prop', 'naturalWidth') + .should('be.greaterThan', 0) + }) + } + + describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL not set', function () { + startWith({ withDataDir: true, vars: {} }) + checkFilesAreAccessible() + }) + + describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL=0', function () { + startWith({ + withDataDir: true, + vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '0' }, + }) + checkFilesAreAccessible() + + describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL=1', function () { + startWith({ + withDataDir: true, + vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '1' }, + }) + checkFilesAreAccessible() + + 
describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL=2', function () { + startWith({ + withDataDir: true, + vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '1' }, + }) + before(async function () { + await runScript({ + cwd: 'services/history-v1', + script: 'storage/scripts/back_fill_file_hash.mjs', + }) + }) + startWith({ + withDataDir: true, + vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '2' }, + }) + checkFilesAreAccessible() + + describe('purge filestore data', function () { + before(async function () { + await purgeFilestoreData() + }) + checkFilesAreAccessible() + }) + }) + }) + }) +}) diff --git a/server-ce/test/helpers/config.ts b/server-ce/test/helpers/config.ts index 030e70ceb5..78e81be1f7 100644 --- a/server-ce/test/helpers/config.ts +++ b/server-ce/test/helpers/config.ts @@ -9,6 +9,7 @@ export function isExcludedBySharding( | 'CE_DEFAULT' | 'CE_CUSTOM_1' | 'CE_CUSTOM_2' + | 'CE_CUSTOM_3' | 'PRO_DEFAULT_1' | 'PRO_DEFAULT_2' | 'PRO_CUSTOM_1' diff --git a/server-ce/test/helpers/hostAdminClient.ts b/server-ce/test/helpers/hostAdminClient.ts index cafeaa2db6..dadfe2b059 100644 --- a/server-ce/test/helpers/hostAdminClient.ts +++ b/server-ce/test/helpers/hostAdminClient.ts @@ -85,6 +85,12 @@ export async function getRedisKeys() { return stdout.split('\n') } +export async function purgeFilestoreData() { + await fetchJSON(`${hostAdminURL}/data/user_files`, { + method: 'DELETE', + }) +} + async function sleep(ms: number) { return new Promise(resolve => { setTimeout(resolve, ms) diff --git a/server-ce/test/helpers/project.ts b/server-ce/test/helpers/project.ts index abcce3f9b2..4b3197afed 100644 --- a/server-ce/test/helpers/project.ts +++ b/server-ce/test/helpers/project.ts @@ -216,3 +216,43 @@ export function createNewFile() { return fileName } + +export function prepareFileUploadTest(binary = false) { + const name = `${uuid()}.txt` + const content = `Test File Content ${name}${binary ? 
' \x00' : ''}` + cy.get('button').contains('Upload').click({ force: true }) + cy.get('input[type=file]') + .first() + .selectFile( + { + contents: Cypress.Buffer.from(content), + fileName: name, + lastModified: Date.now(), + }, + { force: true } + ) + + // wait for the upload to finish + cy.findByRole('treeitem', { name }) + + return function check() { + cy.findByRole('treeitem', { name }).click() + if (binary) { + cy.findByText(content).should('not.have.class', 'cm-line') + } else { + cy.findByText(content).should('have.class', 'cm-line') + } + } +} + +export function testNewFileUpload() { + it('can upload text file', () => { + const check = prepareFileUploadTest(false) + check() + }) + + it('can upload binary file', () => { + const check = prepareFileUploadTest(true) + check() + }) +} diff --git a/server-ce/test/host-admin.js b/server-ce/test/host-admin.js index f73209d58f..b3dcd72b1f 100644 --- a/server-ce/test/host-admin.js +++ b/server-ce/test/host-admin.js @@ -29,6 +29,17 @@ const IMAGES = { PRO: process.env.IMAGE_TAG_PRO.replace(/:.+/, ''), } +function defaultDockerComposeOverride() { + return { + services: { + sharelatex: { + environment: {}, + }, + 'git-bridge': {}, + }, + } +} + let previousConfig = '' function readDockerComposeOverride() { @@ -38,14 +49,7 @@ if (error.code !== 'ENOENT') { throw error } - return { - services: { - sharelatex: { - environment: {}, - }, - 'git-bridge': {}, - }, - } + return defaultDockerComposeOverride() } } @@ -77,12 +81,21 @@ app.use(bodyParser.json()) app.use((req, res, next) => { // Basic access logs console.log(req.method, req.url, req.body) + const json = res.json + res.json = body => { + console.log(req.method, req.url, req.body, '->', body) + json.call(res, body) + } + next() +}) +app.use((req, res, next) => { // Add CORS headers const accessControlAllowOrigin = process.env.ACCESS_CONTROL_ALLOW_ORIGIN || 'http://sharelatex' res.setHeader('Access-Control-Allow-Origin', accessControlAllowOrigin) res.setHeader('Access-Control-Allow-Headers', 'Content-Type') res.setHeader('Access-Control-Max-Age', '3600') + res.setHeader('Access-Control-Allow-Methods', 'DELETE, GET, HEAD, POST, PUT') next() }) @@ -133,6 +146,7 @@ const allowedVars = Joi.object( 'V1_HISTORY_URL', 'SANDBOXED_COMPILES', 'ALL_TEX_LIVE_DOCKER_IMAGE_NAMES', + 'OVERLEAF_FILESTORE_MIGRATION_LEVEL', 'OVERLEAF_TEMPLATES_USER_ID', 'OVERLEAF_NEW_PROJECT_TEMPLATE_LINKS', 'OVERLEAF_ALLOW_PUBLIC_ACCESS', @@ -319,8 +333,19 @@ app.get('/redis/keys', (req, res) => { ) }) +app.delete('/data/user_files', (req, res) => { + runDockerCompose( + 'exec', + ['sharelatex', 'rm', '-rf', '/var/lib/overleaf/data/user_files'], + (error, stdout, stderr) => { + res.json({ error, stdout, stderr }) + } + ) +}) + app.use(handleValidationErrors()) purgeDataDir() +writeDockerComposeOverride(defaultDockerComposeOverride()) app.listen(80) diff --git a/services/chat/docker-compose.ci.yml b/services/chat/docker-compose.ci.yml index 24b57ab084..ca3303a079 100644 --- a/services/chat/docker-compose.ci.yml +++ b/services/chat/docker-compose.ci.yml @@ -42,7 +42,7 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
user: root mongo: - image: mongo:7.0.20 + image: mongo:8.0.11 command: --replSet overleaf volumes: - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js diff --git a/services/chat/docker-compose.yml b/services/chat/docker-compose.yml index ddc5f9e698..e7b8ce7385 100644 --- a/services/chat/docker-compose.yml +++ b/services/chat/docker-compose.yml @@ -44,7 +44,7 @@ services: command: npm run --silent test:acceptance mongo: - image: mongo:7.0.20 + image: mongo:8.0.11 command: --replSet overleaf volumes: - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js diff --git a/services/clsi/app/js/CompileController.js b/services/clsi/app/js/CompileController.js index 7329c14342..b3343ee233 100644 --- a/services/clsi/app/js/CompileController.js +++ b/services/clsi/app/js/CompileController.js @@ -129,7 +129,7 @@ function compile(req, res, next) { compiler: request.compiler, draft: request.draft, imageName: request.imageName - ? Path.basename(request.imageName) + ? request.imageName : undefined, rootResourcePath: request.rootResourcePath, stopOnFirstError: request.stopOnFirstError, diff --git a/services/clsi/app/js/DockerRunner.js b/services/clsi/app/js/DockerRunner.js index def02eaf5b..97053c1875 100644 --- a/services/clsi/app/js/DockerRunner.js +++ b/services/clsi/app/js/DockerRunner.js @@ -232,8 +232,8 @@ const DockerRunner = { } } // set the path based on the image year - const match = image.match(/:([0-9]+)\.[0-9]+/) - const year = match ? match[1] : '2014' + const match = image.match(/:([0-9]+)\.[0-9]+|:TL([0-9]+)/) + const year = match ? match[1] || match[2] : '2014' env.PATH = `/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/${year}/bin/x86_64-linux/` const options = { Cmd: command, diff --git a/services/clsi/config/settings.defaults.js b/services/clsi/config/settings.defaults.js index 1d82258a8e..bd5614eb98 100644 --- a/services/clsi/config/settings.defaults.js +++ b/services/clsi/config/settings.defaults.js @@ -107,7 +107,7 @@ if ((process.env.DOCKER_RUNNER || process.env.SANDBOXED_COMPILES) === 'true') { CLSI: 1, }, socketPath: '/var/run/docker.sock', - user: process.env.TEXLIVE_IMAGE_USER || 'tex', + user: process.env.TEXLIVE_IMAGE_USER || 'www-data', }, optimiseInDocker: true, expireProjectAfterIdleMs: 24 * 60 * 60 * 1000, diff --git a/services/clsi/seccomp/clsi-profile.json b/services/clsi/seccomp/clsi-profile.json index 084354b15c..ad95130f76 100644 --- a/services/clsi/seccomp/clsi-profile.json +++ b/services/clsi/seccomp/clsi-profile.json @@ -829,13 +829,19 @@ "args": [] }, { - "name": "gettimeofday", - "action": "SCMP_ACT_ALLOW", - "args": [] - }, { - "name": "epoll_pwait", - "action": "SCMP_ACT_ALLOW", - "args": [] + "name": "gettimeofday", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "epoll_pwait", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "poll", + "action": "SCMP_ACT_ALLOW", + "args": [] } ] -} \ No newline at end of file +} diff --git a/services/contacts/docker-compose.ci.yml b/services/contacts/docker-compose.ci.yml index 24b57ab084..ca3303a079 100644 --- a/services/contacts/docker-compose.ci.yml +++ b/services/contacts/docker-compose.ci.yml @@ -42,7 +42,7 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . 
user: root mongo: - image: mongo:7.0.20 + image: mongo:8.0.11 command: --replSet overleaf volumes: - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js diff --git a/services/contacts/docker-compose.yml b/services/contacts/docker-compose.yml index 6c77ef5e31..474ea224f8 100644 --- a/services/contacts/docker-compose.yml +++ b/services/contacts/docker-compose.yml @@ -44,7 +44,7 @@ services: command: npm run --silent test:acceptance mongo: - image: mongo:7.0.20 + image: mongo:8.0.11 command: --replSet overleaf volumes: - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js diff --git a/services/docstore/docker-compose.ci.yml b/services/docstore/docker-compose.ci.yml index 40decc4aea..cdb4783c5a 100644 --- a/services/docstore/docker-compose.ci.yml +++ b/services/docstore/docker-compose.ci.yml @@ -47,7 +47,7 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . user: root mongo: - image: mongo:7.0.20 + image: mongo:8.0.11 command: --replSet overleaf volumes: - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js diff --git a/services/docstore/docker-compose.yml b/services/docstore/docker-compose.yml index 8c11eb5a91..a9099c7e7b 100644 --- a/services/docstore/docker-compose.yml +++ b/services/docstore/docker-compose.yml @@ -49,7 +49,7 @@ services: command: npm run --silent test:acceptance mongo: - image: mongo:7.0.20 + image: mongo:8.0.11 command: --replSet overleaf volumes: - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index ca15f35fef..c6ec24a84b 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -55,7 +55,7 @@ services: retries: 20 mongo: - image: mongo:7.0.20 + image: mongo:8.0.11 command: --replSet overleaf volumes: - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index cf7c9a2eb6..c1b23c11c5 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -57,7 +57,7 @@ services: retries: 20 mongo: - image: mongo:7.0.20 + image: mongo:8.0.11 command: --replSet overleaf volumes: - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js diff --git a/services/filestore/app.js b/services/filestore/app.js index 24741e079c..178e8c7ff0 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -111,6 +111,11 @@ if (settings.filestore.stores.template_files) { keyBuilder.templateFileKeyMiddleware, fileController.insertFile ) + app.delete( + '/template/:template_id/v/:version/:format', + keyBuilder.templateFileKeyMiddleware, + fileController.deleteFile + ) } app.get( diff --git a/services/filestore/app/js/FileConverter.js b/services/filestore/app/js/FileConverter.js index ac3dccec1f..bfc34314e9 100644 --- a/services/filestore/app/js/FileConverter.js +++ b/services/filestore/app/js/FileConverter.js @@ -5,7 +5,7 @@ const { callbackify } = require('node:util') const safeExec = require('./SafeExec').promises const { ConversionError } = require('./Errors') -const APPROVED_FORMATS = ['png'] +const APPROVED_FORMATS = ['png', 'jpg'] 
const FOURTY_SECONDS = 40 * 1000 const KILL_SIGNAL = 'SIGTERM' @@ -34,16 +34,14 @@ async function convert(sourcePath, requestedFormat) { } async function thumbnail(sourcePath) { - const width = '260x' - return await convert(sourcePath, 'png', [ + const width = '548x' + return await _convert(sourcePath, 'jpg', [ 'convert', '-flatten', '-background', 'white', '-density', '300', - '-define', - `pdf:fit-page=${width}`, `${sourcePath}[0]`, '-resize', width, @@ -51,16 +49,14 @@ async function thumbnail(sourcePath) { } async function preview(sourcePath) { - const width = '548x' - return await convert(sourcePath, 'png', [ + const width = '794x' + return await _convert(sourcePath, 'jpg', [ 'convert', '-flatten', '-background', 'white', '-density', '300', - '-define', - `pdf:fit-page=${width}`, `${sourcePath}[0]`, '-resize', width, diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index 2ed28bd435..0c092c85cd 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -150,7 +150,9 @@ async function _getConvertedFileAndCache(bucket, key, convertedKey, opts) { let convertedFsPath try { convertedFsPath = await _convertFile(bucket, key, opts) - await ImageOptimiser.promises.compressPng(convertedFsPath) + if (convertedFsPath.toLowerCase().endsWith(".png")) { + await ImageOptimiser.promises.compressPng(convertedFsPath) + } await PersistorManager.sendFile(bucket, convertedKey, convertedFsPath) } catch (err) { LocalFileWriter.deleteFile(convertedFsPath, () => {}) diff --git a/services/history-v1/docker-compose.ci.yml b/services/history-v1/docker-compose.ci.yml index da664d6b30..cf6ec3357d 100644 --- a/services/history-v1/docker-compose.ci.yml +++ b/services/history-v1/docker-compose.ci.yml @@ -75,7 +75,7 @@ services: retries: 20 mongo: - image: mongo:7.0.20 + image: mongo:8.0.11 command: --replSet overleaf volumes: - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js diff --git a/services/history-v1/docker-compose.yml b/services/history-v1/docker-compose.yml index 22b739abf9..3a33882d28 100644 --- a/services/history-v1/docker-compose.yml +++ b/services/history-v1/docker-compose.yml @@ -83,7 +83,7 @@ services: retries: 20 mongo: - image: mongo:7.0.20 + image: mongo:8.0.11 command: --replSet overleaf volumes: - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs index 0ccadaf5a9..2e12328e5c 100644 --- a/services/history-v1/storage/scripts/back_fill_file_hash.mjs +++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs @@ -150,10 +150,6 @@ const CONCURRENT_BATCHES = parseInt(process.env.CONCURRENT_BATCHES || '2', 10) const RETRIES = parseInt(process.env.RETRIES || '10', 10) const RETRY_DELAY_MS = parseInt(process.env.RETRY_DELAY_MS || '100', 10) -const USER_FILES_BUCKET_NAME = process.env.USER_FILES_BUCKET_NAME || '' -if (!USER_FILES_BUCKET_NAME) { - throw new Error('env var USER_FILES_BUCKET_NAME is missing') -} const RETRY_FILESTORE_404 = process.env.RETRY_FILESTORE_404 === 'true' const BUFFER_DIR = fs.mkdtempSync( process.env.BUFFER_DIR_PREFIX || '/tmp/back_fill_file_hash-' diff --git a/services/history-v1/storage/scripts/back_fill_file_hash_fix_up.mjs b/services/history-v1/storage/scripts/back_fill_file_hash_fix_up.mjs index 7bab794692..2525ee1d6e 100644 --- 
a/services/history-v1/storage/scripts/back_fill_file_hash_fix_up.mjs +++ b/services/history-v1/storage/scripts/back_fill_file_hash_fix_up.mjs @@ -9,15 +9,12 @@ import { Blob } from 'overleaf-editor-core' import { BlobStore, getStringLengthOfFile, - GLOBAL_BLOBS, makeBlobForFile, } from '../lib/blob_store/index.js' import { db } from '../lib/mongodb.js' import commandLineArgs from 'command-line-args' import readline from 'node:readline' -import { _blobIsBackedUp, backupBlob } from '../lib/backupBlob.mjs' import { NotFoundError } from '@overleaf/object-persistor/src/Errors.js' -import filestorePersistor from '../lib/persistor.js' import { setTimeout } from 'node:timers/promises' // Silence warning. @@ -52,12 +49,11 @@ ObjectId.cacheHexString = true */ /** - * @return {{FIX_NOT_FOUND: boolean, FIX_HASH_MISMATCH: boolean, FIX_DELETE_PERMISSION: boolean, FIX_MISSING_HASH: boolean, LOGS: string}} + * @return {{FIX_NOT_FOUND: boolean, FIX_HASH_MISMATCH: boolean, FIX_MISSING_HASH: boolean, LOGS: string}} */ function parseArgs() { const args = commandLineArgs([ { name: 'fixNotFound', type: String, defaultValue: 'true' }, - { name: 'fixDeletePermission', type: String, defaultValue: 'true' }, { name: 'fixHashMismatch', type: String, defaultValue: 'true' }, { name: 'fixMissingHash', type: String, defaultValue: 'true' }, { name: 'logs', type: String, defaultValue: '' }, @@ -74,20 +70,13 @@ function parseArgs() { } return { FIX_HASH_MISMATCH: boolVal('fixNotFound'), - FIX_DELETE_PERMISSION: boolVal('fixDeletePermission'), FIX_NOT_FOUND: boolVal('fixHashMismatch'), FIX_MISSING_HASH: boolVal('fixMissingHash'), LOGS: args.logs, } } -const { - FIX_HASH_MISMATCH, - FIX_DELETE_PERMISSION, - FIX_NOT_FOUND, - FIX_MISSING_HASH, - LOGS, -} = parseArgs() +const { FIX_HASH_MISMATCH, FIX_NOT_FOUND, FIX_MISSING_HASH, LOGS } = parseArgs() if (!LOGS) { throw new Error('--logs parameter missing') } @@ -105,6 +94,37 @@ const STREAM_HIGH_WATER_MARK = parseInt( ) const SLEEP_BEFORE_EXIT = parseInt(process.env.SLEEP_BEFORE_EXIT || '1000', 10) +// Filestore endpoint location +const FILESTORE_HOST = process.env.FILESTORE_HOST || '127.0.0.1' +const FILESTORE_PORT = process.env.FILESTORE_PORT || '3009' + +async function fetchFromFilestore(projectId, fileId) { + const url = `http://${FILESTORE_HOST}:${FILESTORE_PORT}/project/${projectId}/file/${fileId}` + const response = await fetch(url) + if (!response.ok) { + if (response.status === 404) { + throw new NotFoundError('file not found in filestore', { + status: response.status, + }) + } + const body = await response.text() + throw new OError('fetchFromFilestore failed', { + projectId, + fileId, + status: response.status, + body, + }) + } + if (!response.body) { + throw new OError('fetchFromFilestore response has no body', { + projectId, + fileId, + status: response.status, + }) + } + return response.body +} + /** @type {ProjectsCollection} */ const projectsCollection = db.collection('projects') /** @type {DeletedProjectsCollection} */ @@ -302,19 +322,16 @@ async function setHashInMongo(projectId, fileId, hash) { * @return {Promise} */ async function importRestoredFilestoreFile(projectId, fileId, historyId) { - const filestoreKey = `${projectId}/${fileId}` const path = `${BUFFER_DIR}/${projectId}_${fileId}` try { let s try { - s = await filestorePersistor.getObjectStream( - USER_FILES_BUCKET_NAME, - filestoreKey - ) + s = await fetchFromFilestore(projectId, fileId) } catch (err) { if (err instanceof NotFoundError) { throw new OError('missing blob, need to restore filestore file', 
{ - filestoreKey, + projectId, + fileId, }) } throw err @@ -325,7 +342,6 @@ async function importRestoredFilestoreFile(projectId, fileId, historyId) { ) const blobStore = new BlobStore(historyId) const blob = await blobStore.putFile(path) - await backupBlob(historyId, blob, path) await setHashInMongo(projectId, fileId, blob.getHash()) } finally { await fs.promises.rm(path, { force: true }) @@ -339,13 +355,9 @@ async function importRestoredFilestoreFile(projectId, fileId, historyId) { * @return {Promise} */ async function bufferFilestoreFileToDisk(projectId, fileId, path) { - const filestoreKey = `${projectId}/${fileId}` try { await Stream.promises.pipeline( - await filestorePersistor.getObjectStream( - USER_FILES_BUCKET_NAME, - filestoreKey - ), + await fetchFromFilestore(projectId, fileId), fs.createWriteStream(path, { highWaterMark: STREAM_HIGH_WATER_MARK }) ) const blob = await makeBlobForFile(path) @@ -356,7 +368,8 @@ async function bufferFilestoreFileToDisk(projectId, fileId, path) { } catch (err) { if (err instanceof NotFoundError) { throw new OError('missing blob, need to restore filestore file', { - filestoreKey, + projectId, + fileId, }) } throw err @@ -389,7 +402,7 @@ async function uploadFilestoreFile(projectId, fileId) { const blob = await bufferFilestoreFileToDisk(projectId, fileId, path) const hash = blob.getHash() try { - await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) + await ensureBlobExistsForFile(projectId, fileId, hash) } catch (err) { if (!(err instanceof Blob.NotFoundError)) throw err @@ -397,7 +410,7 @@ async function uploadFilestoreFile(projectId, fileId) { const historyId = project.overleaf.history.id.toString() const blobStore = new BlobStore(historyId) await blobStore.putBlob(path, blob) - await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) + await ensureBlobExistsForFile(projectId, fileId, hash) } } finally { await fs.promises.rm(path, { force: true }) @@ -426,11 +439,7 @@ async function fixHashMismatch(line) { await importRestoredFilestoreFile(projectId, fileId, historyId) return true } - return await ensureBlobExistsForFileAndUploadToAWS( - projectId, - fileId, - computedHash - ) + return await ensureBlobExistsForFile(projectId, fileId, computedHash) } /** @@ -444,30 +453,19 @@ async function hashAlreadyUpdatedInFileTree(projectId, fileId, hash) { return fileRef.hash === hash } -/** - * @param {string} projectId - * @param {string} hash - * @return {Promise} - */ -async function needsBackingUpToAWS(projectId, hash) { - if (GLOBAL_BLOBS.has(hash)) return false - return !(await _blobIsBackedUp(projectId, hash)) -} - /** * @param {string} projectId * @param {string} fileId * @param {string} hash * @return {Promise} */ -async function ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) { +async function ensureBlobExistsForFile(projectId, fileId, hash) { const { project } = await getProject(projectId) const historyId = project.overleaf.history.id.toString() const blobStore = new BlobStore(historyId) if ( (await hashAlreadyUpdatedInFileTree(projectId, fileId, hash)) && - (await blobStore.getBlob(hash)) && - !(await needsBackingUpToAWS(projectId, hash)) + (await blobStore.getBlob(hash)) ) { return false // already processed } @@ -488,7 +486,7 @@ async function ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) { ) if (writtenBlob.getHash() !== hash) { // Double check download, better safe than sorry. 
- throw new OError('blob corrupted', { writtenBlob }) + throw new OError('blob corrupted', { writtenBlob, hash }) } let blob = await blobStore.getBlob(hash) @@ -497,7 +495,6 @@ async function ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) { // HACK: Skip upload to GCS and finalize putBlob operation directly. await blobStore.backend.insertBlob(historyId, writtenBlob) } - await backupBlob(historyId, writtenBlob, path) } finally { await fs.promises.rm(path, { force: true }) } @@ -505,16 +502,6 @@ async function ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) { return true } -/** - * @param {string} line - * @return {Promise} - */ -async function fixDeletePermission(line) { - let { projectId, fileId, hash } = JSON.parse(line) - if (!hash) hash = await computeFilestoreFileHash(projectId, fileId) - return await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) -} - /** * @param {string} line * @return {Promise} @@ -526,7 +513,7 @@ async function fixMissingHash(line) { } = await findFile(projectId, fileId) if (hash) { // processed, double check - return await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) + return await ensureBlobExistsForFile(projectId, fileId, hash) } await uploadFilestoreFile(projectId, fileId) return true @@ -543,11 +530,6 @@ const CASES = { flag: FIX_HASH_MISMATCH, action: fixHashMismatch, }, - 'delete permission': { - match: 'storage.objects.delete', - flag: FIX_DELETE_PERMISSION, - action: fixDeletePermission, - }, 'missing file hash': { match: '"bad file hash"', flag: FIX_MISSING_HASH, diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs index 62b0b1de25..b6cdd4b9bf 100644 --- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs +++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs @@ -20,7 +20,7 @@ import { makeProjectKey, } from '../../../../storage/lib/blob_store/index.js' -import express from 'express' +import { mockFilestore } from './support/MockFilestore.mjs' chai.use(chaiExclude) const TIMEOUT = 20 * 1_000 @@ -28,59 +28,6 @@ const TIMEOUT = 20 * 1_000 const projectsCollection = db.collection('projects') const deletedProjectsCollection = db.collection('deletedProjects') -class MockFilestore { - constructor() { - this.host = process.env.FILESTORE_HOST || '127.0.0.1' - this.port = process.env.FILESTORE_PORT || 3009 - // create a server listening on this.host and this.port - this.files = {} - - this.app = express() - - this.app.get('/project/:projectId/file/:fileId', (req, res) => { - const { projectId, fileId } = req.params - const content = this.files[projectId]?.[fileId] - if (!content) return res.status(404).end() - res.status(200).end(content) - }) - } - - start() { - // reset stored files - this.files = {} - // start the server - if (this.serverPromise) { - return this.serverPromise - } else { - this.serverPromise = new Promise((resolve, reject) => { - this.server = this.app.listen(this.port, this.host, err => { - if (err) return reject(err) - resolve() - }) - }) - return this.serverPromise - } - } - - addFile(projectId, fileId, fileContent) { - if (!this.files[projectId]) { - this.files[projectId] = {} - } - this.files[projectId][fileId] = fileContent - } - - deleteObject(projectId, fileId) { - if (this.files[projectId]) { - delete this.files[projectId][fileId] - if (Object.keys(this.files[projectId]).length === 0) { - delete 
this.files[projectId] - } - } - } -} - -const mockFilestore = new MockFilestore() - /** * @param {ObjectId} objectId * @return {string} diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash_fix_up.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash_fix_up.test.mjs index ceafa24c3a..3aa00d685a 100644 --- a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash_fix_up.test.mjs +++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash_fix_up.test.mjs @@ -1,48 +1,24 @@ import fs from 'node:fs' import Crypto from 'node:crypto' -import Stream from 'node:stream' import { promisify } from 'node:util' import { Binary, ObjectId } from 'mongodb' import { Blob } from 'overleaf-editor-core' -import { backedUpBlobs, blobs, db } from '../../../../storage/lib/mongodb.js' +import { db } from '../../../../storage/lib/mongodb.js' import cleanup from './support/cleanup.js' import testProjects from '../api/support/test_projects.js' import { execFile } from 'node:child_process' import chai, { expect } from 'chai' import chaiExclude from 'chai-exclude' -import config from 'config' -import { WritableBuffer } from '@overleaf/stream-utils' -import { - backupPersistor, - projectBlobsBucket, -} from '../../../../storage/lib/backupPersistor.mjs' -import projectKey from '../../../../storage/lib/project_key.js' -import { - BlobStore, - makeProjectKey, -} from '../../../../storage/lib/blob_store/index.js' -import ObjectPersistor from '@overleaf/object-persistor' +import { BlobStore } from '../../../../storage/lib/blob_store/index.js' +import { mockFilestore } from './support/MockFilestore.mjs' chai.use(chaiExclude) const TIMEOUT = 20 * 1_000 -const { deksBucket } = config.get('backupStore') -const { tieringStorageClass } = config.get('backupPersistor') - const projectsCollection = db.collection('projects') const deletedProjectsCollection = db.collection('deletedProjects') -const FILESTORE_PERSISTOR = ObjectPersistor({ - backend: 'gcs', - gcs: { - endpoint: { - apiEndpoint: process.env.GCS_API_ENDPOINT, - projectId: process.env.GCS_PROJECT_ID, - }, - }, -}) - /** * @param {ObjectId} objectId * @return {string} @@ -70,17 +46,6 @@ function binaryForGitBlobHash(gitBlobHash) { return new Binary(Buffer.from(gitBlobHash, 'hex')) } -async function listS3Bucket(bucket, wantStorageClass) { - const client = backupPersistor._getClientForBucket(bucket) - const response = await client.listObjectsV2({ Bucket: bucket }).promise() - - for (const object of response.Contents || []) { - expect(object).to.have.property('StorageClass', wantStorageClass) - } - - return (response.Contents || []).map(item => item.Key || '') -} - function objectIdFromTime(timestamp) { return ObjectId.createFromTime(new Date(timestamp).getTime() / 1000) } @@ -97,7 +62,6 @@ describe('back_fill_file_hash_fix_up script', function () { const historyIdDeleted0 = projectIdDeleted0.toString() const fileIdWithDifferentHashFound = objectIdFromTime('2017-02-01T00:00:00Z') const fileIdInGoodState = objectIdFromTime('2017-02-01T00:01:00Z') - const fileIdBlobExistsInGCS0 = objectIdFromTime('2017-02-01T00:02:00Z') const fileIdWithDifferentHashNotFound0 = objectIdFromTime( '2017-02-01T00:03:00Z' ) @@ -112,9 +76,6 @@ describe('back_fill_file_hash_fix_up script', function () { const fileIdWithDifferentHashRestore = objectIdFromTime( '2017-02-01T00:08:00Z' ) - const fileIdBlobExistsInGCS1 = objectIdFromTime('2017-02-01T00:09:00Z') - const fileIdRestoreFromFilestore0 = 
objectIdFromTime('2017-02-01T00:10:00Z') - const fileIdRestoreFromFilestore1 = objectIdFromTime('2017-02-01T00:11:00Z') const fileIdMissing2 = objectIdFromTime('2017-02-01T00:12:00Z') const fileIdHashMissing0 = objectIdFromTime('2017-02-01T00:13:00Z') const fileIdHashMissing1 = objectIdFromTime('2017-02-01T00:14:00Z') @@ -125,31 +86,11 @@ describe('back_fill_file_hash_fix_up script', function () { ) const deleteProjectsRecordId0 = new ObjectId() const writtenBlobs = [ - { - projectId: projectId0, - historyId: historyId0, - fileId: fileIdBlobExistsInGCS0, - }, - { - projectId: projectId0, - historyId: historyId0, - fileId: fileIdBlobExistsInGCS1, - }, { projectId: projectId0, historyId: historyId0, fileId: fileIdWithDifferentHashNotFound0, }, - { - projectId: projectId0, - historyId: historyId0, - fileId: fileIdRestoreFromFilestore0, - }, - { - projectId: projectId0, - historyId: historyId0, - fileId: fileIdRestoreFromFilestore1, - }, { projectId: projectId0, historyId: historyId0, @@ -200,17 +141,6 @@ describe('back_fill_file_hash_fix_up script', function () { }, msg: 'failed to process file', }, - { - projectId: projectId0, - fileId: fileIdRestoreFromFilestore0, - err: { message: 'OError: hash mismatch' }, - hash: gitBlobHash(fileIdRestoreFromFilestore0), - entry: { - ctx: { historyId: historyId0.toString() }, - hash: hashDoesNotExistAsBlob, - }, - msg: 'failed to process file', - }, { projectId: projectIdDeleted0, fileId: fileIdWithDifferentHashNotFound1, @@ -236,33 +166,6 @@ describe('back_fill_file_hash_fix_up script', function () { err: { message: 'NotFoundError' }, msg: 'failed to process file', }, - { - projectId: projectId0, - fileId: fileIdBlobExistsInGCS0, - hash: gitBlobHash(fileIdBlobExistsInGCS0), - err: { message: 'storage.objects.delete' }, - msg: 'failed to process file', - }, - { - projectId: projectId0, - fileId: fileIdBlobExistsInGCSCorrupted, - hash: gitBlobHash(fileIdBlobExistsInGCSCorrupted), - err: { message: 'storage.objects.delete' }, - msg: 'failed to process file', - }, - { - projectId: projectId0, - fileId: fileIdBlobExistsInGCS1, - hash: gitBlobHash(fileIdBlobExistsInGCS1), - err: { message: 'storage.objects.delete' }, - msg: 'failed to process file', - }, - { - projectId: projectId0, - fileId: fileIdRestoreFromFilestore1, - err: { message: 'storage.objects.delete' }, - msg: 'failed to process file', - }, { projectId: projectIdDeleted0, fileId: fileIdMissing1, @@ -291,22 +194,23 @@ describe('back_fill_file_hash_fix_up script', function () { reason: 'bad file hash', msg: 'bad file-tree path', }, + { + projectId: projectId0, + _id: fileIdBlobExistsInGCSCorrupted, + reason: 'bad file hash', + msg: 'bad file-tree path', + }, ] if (PRINT_IDS_AND_HASHES_FOR_DEBUGGING) { const fileIds = { fileIdWithDifferentHashFound, fileIdInGoodState, - fileIdBlobExistsInGCS0, - fileIdBlobExistsInGCS1, fileIdWithDifferentHashNotFound0, fileIdWithDifferentHashNotFound1, - fileIdBlobExistsInGCSCorrupted, fileIdMissing0, fileIdMissing1, fileIdMissing2, fileIdWithDifferentHashRestore, - fileIdRestoreFromFilestore0, - fileIdRestoreFromFilestore1, fileIdHashMissing0, fileIdHashMissing1, } @@ -330,38 +234,25 @@ describe('back_fill_file_hash_fix_up script', function () { before(cleanup.everything) before('populate blobs/GCS', async function () { - await FILESTORE_PERSISTOR.sendStream( - USER_FILES_BUCKET_NAME, - `${projectId0}/${fileIdRestoreFromFilestore0}`, - Stream.Readable.from([fileIdRestoreFromFilestore0.toString()]) + await mockFilestore.start() + mockFilestore.addFile( + projectId0, 
+ fileIdHashMissing0, + fileIdHashMissing0.toString() ) - await FILESTORE_PERSISTOR.sendStream( - USER_FILES_BUCKET_NAME, - `${projectId0}/${fileIdRestoreFromFilestore1}`, - Stream.Readable.from([fileIdRestoreFromFilestore1.toString()]) + mockFilestore.addFile( + projectId0, + fileIdHashMissing1, + fileIdHashMissing1.toString() ) - await FILESTORE_PERSISTOR.sendStream( - USER_FILES_BUCKET_NAME, - `${projectId0}/${fileIdHashMissing0}`, - Stream.Readable.from([fileIdHashMissing0.toString()]) - ) - await FILESTORE_PERSISTOR.sendStream( - USER_FILES_BUCKET_NAME, - `${projectId0}/${fileIdHashMissing1}`, - Stream.Readable.from([fileIdHashMissing1.toString()]) + mockFilestore.addFile( + projectId0, + fileIdBlobExistsInGCSCorrupted, + fileIdBlobExistsInGCSCorrupted.toString() ) await new BlobStore(historyId0.toString()).putString( fileIdHashMissing1.toString() // partially processed ) - await new BlobStore(historyId0.toString()).putString( - fileIdBlobExistsInGCS0.toString() - ) - await new BlobStore(historyId0.toString()).putString( - fileIdBlobExistsInGCS1.toString() - ) - await new BlobStore(historyId0.toString()).putString( - fileIdRestoreFromFilestore1.toString() - ) const path = '/tmp/test-blob-corrupted' try { await fs.promises.writeFile(path, contentCorruptedBlob) @@ -426,22 +317,10 @@ describe('back_fill_file_hash_fix_up script', function () { _id: fileIdWithDifferentHashNotFound0, hash: hashDoesNotExistAsBlob, }, - { - _id: fileIdRestoreFromFilestore0, - hash: hashDoesNotExistAsBlob, - }, - { - _id: fileIdRestoreFromFilestore1, - }, - { - _id: fileIdBlobExistsInGCS0, - hash: gitBlobHash(fileIdBlobExistsInGCS0), - }, { _id: fileIdBlobExistsInGCSCorrupted, hash: gitBlobHash(fileIdBlobExistsInGCSCorrupted), }, - { _id: fileIdBlobExistsInGCS1 }, ], folders: [], }, @@ -546,8 +425,8 @@ describe('back_fill_file_hash_fix_up script', function () { }) it('should print stats', function () { expect(stats).to.contain({ - processedLines: 16, - success: 11, + processedLines: 12, + success: 7, alreadyProcessed: 0, fileDeleted: 0, skipped: 0, @@ -558,9 +437,9 @@ describe('back_fill_file_hash_fix_up script', function () { it('should handle re-run on same logs', async function () { ;({ stats } = await runScriptWithLogs()) expect(stats).to.contain({ - processedLines: 16, + processedLines: 12, success: 0, - alreadyProcessed: 8, + alreadyProcessed: 4, fileDeleted: 3, skipped: 0, failed: 3, @@ -663,31 +542,11 @@ describe('back_fill_file_hash_fix_up script', function () { _id: fileIdWithDifferentHashNotFound0, hash: gitBlobHash(fileIdWithDifferentHashNotFound0), }, - // Updated hash - { - _id: fileIdRestoreFromFilestore0, - hash: gitBlobHash(fileIdRestoreFromFilestore0), - }, - // Added hash - { - _id: fileIdRestoreFromFilestore1, - hash: gitBlobHash(fileIdRestoreFromFilestore1), - }, - // No change, blob created - { - _id: fileIdBlobExistsInGCS0, - hash: gitBlobHash(fileIdBlobExistsInGCS0), - }, // No change, flagged { _id: fileIdBlobExistsInGCSCorrupted, hash: gitBlobHash(fileIdBlobExistsInGCSCorrupted), }, - // Added hash - { - _id: fileIdBlobExistsInGCS1, - hash: gitBlobHash(fileIdBlobExistsInGCS1), - }, ], folders: [], }, @@ -696,7 +555,7 @@ describe('back_fill_file_hash_fix_up script', function () { ], overleaf: { history: { id: historyId0 } }, // Incremented when removing file/updating hash - version: 8, + version: 5, }, ]) expect(await deletedProjectsCollection.find({}).toArray()).to.deep.equal([ @@ -745,62 +604,6 @@ describe('back_fill_file_hash_fix_up script', function () { 
(writtenBlobsByProject.get(projectId) || []).concat([fileId]) ) } - expect( - (await backedUpBlobs.find({}, { sort: { _id: 1 } }).toArray()).map( - entry => { - // blobs are pushed unordered into mongo. Sort the list for consistency. - entry.blobs.sort() - return entry - } - ) - ).to.deep.equal( - Array.from(writtenBlobsByProject.entries()).map( - ([projectId, fileIds]) => { - return { - _id: projectId, - blobs: fileIds - .map(fileId => binaryForGitBlobHash(gitBlobHash(fileId))) - .sort(), - } - } - ) - ) - }) - it('should have backed up all the files', async function () { - expect(tieringStorageClass).to.exist - const objects = await listS3Bucket(projectBlobsBucket, tieringStorageClass) - expect(objects.sort()).to.deep.equal( - writtenBlobs - .map(({ historyId, fileId, hash }) => - makeProjectKey(historyId, hash || gitBlobHash(fileId)) - ) - .sort() - ) - for (let { historyId, fileId } of writtenBlobs) { - const hash = gitBlobHash(fileId.toString()) - const s = await backupPersistor.getObjectStream( - projectBlobsBucket, - makeProjectKey(historyId, hash), - { autoGunzip: true } - ) - const buf = new WritableBuffer() - await Stream.promises.pipeline(s, buf) - expect(gitBlobHashBuffer(buf.getContents())).to.equal(hash) - const id = buf.getContents().toString('utf-8') - expect(id).to.equal(fileId.toString()) - // double check we are not comparing 'undefined' or '[object Object]' above - expect(id).to.match(/^[a-f0-9]{24}$/) - } - const deks = await listS3Bucket(deksBucket, 'STANDARD') - expect(deks.sort()).to.deep.equal( - Array.from( - new Set( - writtenBlobs.map( - ({ historyId }) => projectKey.format(historyId) + '/dek' - ) - ) - ).sort() - ) }) it('should have written the back filled files to history v1', async function () { for (const { historyId, fileId } of writtenBlobs) { diff --git a/services/history-v1/test/acceptance/js/storage/support/MockFilestore.mjs b/services/history-v1/test/acceptance/js/storage/support/MockFilestore.mjs new file mode 100644 index 0000000000..55d0923c34 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/support/MockFilestore.mjs @@ -0,0 +1,54 @@ +import express from 'express' + +class MockFilestore { + constructor() { + this.host = process.env.FILESTORE_HOST || '127.0.0.1' + this.port = process.env.FILESTORE_PORT || 3009 + // create a server listening on this.host and this.port + this.files = {} + + this.app = express() + + this.app.get('/project/:projectId/file/:fileId', (req, res) => { + const { projectId, fileId } = req.params + const content = this.files[projectId]?.[fileId] + if (!content) return res.status(404).end() + res.status(200).end(content) + }) + } + + start() { + // reset stored files + this.files = {} + // start the server + if (this.serverPromise) { + return this.serverPromise + } else { + this.serverPromise = new Promise((resolve, reject) => { + this.server = this.app.listen(this.port, this.host, err => { + if (err) return reject(err) + resolve() + }) + }) + return this.serverPromise + } + } + + addFile(projectId, fileId, fileContent) { + if (!this.files[projectId]) { + this.files[projectId] = {} + } + this.files[projectId][fileId] = fileContent + } + + deleteObject(projectId, fileId) { + if (this.files[projectId]) { + delete this.files[projectId][fileId] + if (Object.keys(this.files[projectId]).length === 0) { + delete this.files[projectId] + } + } + } +} + +export const mockFilestore = new MockFilestore() diff --git a/services/notifications/docker-compose.ci.yml b/services/notifications/docker-compose.ci.yml index 
24b57ab084..ca3303a079 100644 --- a/services/notifications/docker-compose.ci.yml +++ b/services/notifications/docker-compose.ci.yml @@ -42,7 +42,7 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . user: root mongo: - image: mongo:7.0.20 + image: mongo:8.0.11 command: --replSet overleaf volumes: - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js diff --git a/services/notifications/docker-compose.yml b/services/notifications/docker-compose.yml index 081bbfa002..e43e9aeef5 100644 --- a/services/notifications/docker-compose.yml +++ b/services/notifications/docker-compose.yml @@ -44,7 +44,7 @@ services: command: npm run --silent test:acceptance mongo: - image: mongo:7.0.20 + image: mongo:8.0.11 command: --replSet overleaf volumes: - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js diff --git a/services/project-history/docker-compose.ci.yml b/services/project-history/docker-compose.ci.yml index ca15f35fef..c6ec24a84b 100644 --- a/services/project-history/docker-compose.ci.yml +++ b/services/project-history/docker-compose.ci.yml @@ -55,7 +55,7 @@ services: retries: 20 mongo: - image: mongo:7.0.20 + image: mongo:8.0.11 command: --replSet overleaf volumes: - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js diff --git a/services/project-history/docker-compose.yml b/services/project-history/docker-compose.yml index eeca03de6e..dd3c6468fe 100644 --- a/services/project-history/docker-compose.yml +++ b/services/project-history/docker-compose.yml @@ -57,7 +57,7 @@ services: retries: 20 mongo: - image: mongo:7.0.20 + image: mongo:8.0.11 command: --replSet overleaf volumes: - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js diff --git a/services/references/.eslintrc b/services/references/.eslintrc new file mode 100644 index 0000000000..cc68024d9d --- /dev/null +++ b/services/references/.eslintrc @@ -0,0 +1,6 @@ +{ + "parserOptions": { + "ecmaVersion": 2022, + "sourceType": "module" + } +} diff --git a/services/references/.gitignore b/services/references/.gitignore new file mode 100644 index 0000000000..80bac793a7 --- /dev/null +++ b/services/references/.gitignore @@ -0,0 +1,5 @@ +node_modules +forever + +# managed by dev-environment$ bin/update_build_scripts +.npmrc diff --git a/services/references/.mocharc.json b/services/references/.mocharc.json new file mode 100644 index 0000000000..dc3280aa96 --- /dev/null +++ b/services/references/.mocharc.json @@ -0,0 +1,3 @@ +{ + "require": "test/setup.js" +} diff --git a/services/references/.nvmrc b/services/references/.nvmrc new file mode 100644 index 0000000000..0254b1e633 --- /dev/null +++ b/services/references/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/services/references/Dockerfile b/services/references/Dockerfile new file mode 100644 index 0000000000..caa6e2a31c --- /dev/null +++ b/services/references/Dockerfile @@ -0,0 +1,27 @@ +# This file was auto-generated, do not edit it directly. 
+# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +FROM node:20.18.2 AS base + +WORKDIR /overleaf/services/references + +# Google Cloud Storage needs a writable $HOME/.config for resumable uploads +# (see https://googleapis.dev/nodejs/storage/latest/File.html#createWriteStream) +RUN mkdir /home/node/.config && chown node:node /home/node/.config + +FROM base AS app + +COPY package.json package-lock.json /overleaf/ +COPY services/references/package.json /overleaf/services/references/ +COPY libraries/ /overleaf/libraries/ +COPY patches/ /overleaf/patches/ + +RUN cd /overleaf && npm ci --quiet + +COPY services/references/ /overleaf/services/references/ + +FROM app +USER node + +CMD ["node", "--expose-gc", "app.js"] diff --git a/services/references/LICENSE b/services/references/LICENSE new file mode 100644 index 0000000000..ac8619dcb9 --- /dev/null +++ b/services/references/LICENSE @@ -0,0 +1,662 @@ + + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. 
This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +. diff --git a/services/references/Makefile b/services/references/Makefile new file mode 100644 index 0000000000..e5181b46f3 --- /dev/null +++ b/services/references/Makefile @@ -0,0 +1,156 @@ +# This file was auto-generated, do not edit it directly. 
+# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +BUILD_NUMBER ?= local +BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) +PROJECT_NAME = references +BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]') + +DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml +DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ + BRANCH_NAME=$(BRANCH_NAME) \ + PROJECT_NAME=$(PROJECT_NAME) \ + MOCHA_GREP=${MOCHA_GREP} \ + docker compose ${DOCKER_COMPOSE_FLAGS} + +COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE ?= test_acceptance_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_ACCEPTANCE = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE) $(DOCKER_COMPOSE) + +COMPOSE_PROJECT_NAME_TEST_UNIT ?= test_unit_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_UNIT = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_UNIT) $(DOCKER_COMPOSE) + +clean: + -docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -docker rmi us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local + -$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local + +HERE=$(shell pwd) +MONOREPO=$(shell cd ../../ && pwd) +# Run the linting commands in the scope of the monorepo. +# Eslint and prettier (plus some configs) are on the root. +RUN_LINTING = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(HERE) node:20.18.2 npm run --silent + +RUN_LINTING_CI = docker run --rm --volume $(MONOREPO)/.editorconfig:/overleaf/.editorconfig --volume $(MONOREPO)/.eslintignore:/overleaf/.eslintignore --volume $(MONOREPO)/.eslintrc:/overleaf/.eslintrc --volume $(MONOREPO)/.prettierignore:/overleaf/.prettierignore --volume $(MONOREPO)/.prettierrc:/overleaf/.prettierrc --volume $(MONOREPO)/tsconfig.backend.json:/overleaf/tsconfig.backend.json ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) npm run --silent + +# Same but from the top of the monorepo +RUN_LINTING_MONOREPO = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(MONOREPO) node:20.18.2 npm run --silent + +SHELLCHECK_OPTS = \ + --shell=bash \ + --external-sources +SHELLCHECK_COLOR := $(if $(CI),--color=never,--color) +SHELLCHECK_FILES := { git ls-files "*.sh" -z; git grep -Plz "\A\#\!.*bash"; } | sort -zu + +shellcheck: + @$(SHELLCHECK_FILES) | xargs -0 -r docker run --rm -v $(HERE):/mnt -w /mnt \ + koalaman/shellcheck:stable $(SHELLCHECK_OPTS) $(SHELLCHECK_COLOR) + +shellcheck_fix: + @$(SHELLCHECK_FILES) | while IFS= read -r -d '' file; do \ + diff=$$(docker run --rm -v $(HERE):/mnt -w /mnt koalaman/shellcheck:stable $(SHELLCHECK_OPTS) --format=diff "$$file" 2>/dev/null); \ + if [ -n "$$diff" ] && ! 
echo "$$diff" | patch -p1 >/dev/null 2>&1; then echo "\033[31m$$file\033[0m"; \ + elif [ -n "$$diff" ]; then echo "$$file"; \ + else echo "\033[2m$$file\033[0m"; fi \ + done + +format: + $(RUN_LINTING) format + +format_ci: + $(RUN_LINTING_CI) format + +format_fix: + $(RUN_LINTING) format:fix + +lint: + $(RUN_LINTING) lint + +lint_ci: + $(RUN_LINTING_CI) lint + +lint_fix: + $(RUN_LINTING) lint:fix + +typecheck: + $(RUN_LINTING) types:check + +typecheck_ci: + $(RUN_LINTING_CI) types:check + +test: format lint typecheck shellcheck test_unit test_acceptance + +test_unit: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit + $(MAKE) test_unit_clean +endif + +test_clean: test_unit_clean +test_unit_clean: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0 +endif + +test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run + $(MAKE) test_acceptance_clean + +test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug + $(MAKE) test_acceptance_clean + +test_acceptance_run: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance +endif + +test_acceptance_run_debug: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk +endif + +test_clean: test_acceptance_clean +test_acceptance_clean: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 + +test_acceptance_pre_run: +ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run +endif + +benchmarks: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance npm run benchmarks + +build: + docker build \ + --pull \ + --build-arg BUILDKIT_INLINE_CACHE=1 \ + --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):main \ + --file Dockerfile \ + ../.. + +tar: + $(DOCKER_COMPOSE) up tar + +publish: + + docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + + +.PHONY: clean \ + format format_fix \ + lint lint_fix \ + build_types typecheck \ + lint_ci format_ci typecheck_ci \ + shellcheck shellcheck_fix \ + test test_clean test_unit test_unit_clean \ + test_acceptance test_acceptance_debug test_acceptance_pre_run \ + test_acceptance_run test_acceptance_run_debug test_acceptance_clean \ + benchmarks \ + build tar publish \ diff --git a/services/references/README.md b/services/references/README.md new file mode 100644 index 0000000000..41844d259a --- /dev/null +++ b/services/references/README.md @@ -0,0 +1,10 @@ +overleaf/references +=============== + +An API for providing citation-keys from user bib-files + +License +======= +The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. 
+ +Based on https://github.com/overleaf/overleaf/commit/9964aebc794f9fd7ce1373ab3484f6b33b061af3 diff --git a/services/references/app.js b/services/references/app.js new file mode 100644 index 0000000000..a7da8720ed --- /dev/null +++ b/services/references/app.js @@ -0,0 +1,40 @@ +import '@overleaf/metrics/initialize.js' + +import express from 'express' +import Settings from '@overleaf/settings' +import logger from '@overleaf/logger' +import metrics from '@overleaf/metrics' +import ReferencesAPIController from './app/js/ReferencesAPIController.js' +import bodyParser from 'body-parser' + +const app = express() +metrics.injectMetricsRoute(app) + +app.use(bodyParser.json({ limit: '2mb' })) +app.use(metrics.http.monitor(logger)) + +app.post('/project/:project_id/index', ReferencesAPIController.index) +app.get('/status', (req, res) => res.send({ status: 'references api is up' })) + +const settings = + Settings.internal && Settings.internal.references + ? Settings.internal.references + : undefined +const host = settings && settings.host ? settings.host : 'localhost' +const port = settings && settings.port ? settings.port : 3056 + +logger.debug('Listening at', { host, port }) + +const server = app.listen(port, host, function (error) { + if (error) { + throw error + } + logger.info({ host, port }, 'references HTTP server starting up') +}) + +process.on('SIGTERM', () => { + server.close(() => { + logger.info({ host, port }, 'references HTTP server closed') + metrics.close() + }) +}) diff --git a/services/references/app/js/ReferencesAPIController.js b/services/references/app/js/ReferencesAPIController.js new file mode 100644 index 0000000000..ac51ca6bbd --- /dev/null +++ b/services/references/app/js/ReferencesAPIController.js @@ -0,0 +1,42 @@ +import logger from '@overleaf/logger' +import BibtexParser from './bib2json.js' + +export default { + async index(req, res) { + const { docUrls, fullIndex } = req.body + try { + const responses = await Promise.all( + docUrls.map(async (docUrl) => { + try { + const response = await fetch(docUrl) + if (!response.ok) { + throw new Error(`HTTP error! status: ${response.status}`) + } + return response.text() + } catch (error) { + logger.error({ error }, "Failed to fetch document from URL: " + docUrl) + return null + } + }) + ) + const keys = [] + for (const body of responses) { + if (!body) continue + + try { + const parsedEntries = BibtexParser(body).entries + const ks = parsedEntries + .filter(entry => entry.EntryKey) + .map(entry => entry.EntryKey) + keys.push(...ks) + } catch (error) { + logger.error({ error }, "bib file skipped.") + } + } + res.status(200).json({ keys }) + } catch (error) { + logger.error({ error }, "Unexpected error during indexing process.") + res.status(500).json({ error: "Failed to process bib files." }) + } + } +} diff --git a/services/references/app/js/bib2json.js b/services/references/app/js/bib2json.js new file mode 100644 index 0000000000..99cfcf70ee --- /dev/null +++ b/services/references/app/js/bib2json.js @@ -0,0 +1,1967 @@ +/* eslint-disable */ +/** + * Parser.js + * Copyright 2012-13 Mayank Lahiri + * mlahiri@gmail.com + * Released under the BSD License. + * + * Modifications 2016 Sharelatex + * Modifications 2017-2020 Overleaf + * + * A forgiving Bibtex parser that can: + * + * (1) operate in streaming or block mode, extracting entries as dictionaries. + * (2) convert Latex special characters to UTF-8. + * (3) best-effort parse malformed entries. + * (4) run in a CommonJS environment or a browser, without any dependencies. 
+ * (5) be advanced-compiled by Google Closure Compiler. + * + * Handwritten as a labor of love, not auto-generated from a grammar. + * + * Modes of usage: + * + * (1) Synchronous, string + * + * var entries = BibtexParser(text); + * console.log(entries); + * + * (2) Asynchronous, stream + * + * function entryCallback(entry) { console.log(entry); } + * var parser = new BibtexParser(entryCallback); + * parser.parse(chunk1); + * parser.parse(chunk2); + * ... + * + * @param {text|function(Object)} arg0 Either a Bibtex string or callback + * function for processing parsed entries. + * @param {array} allowedKeys optimization: do not output key/value pairs that are not on this allowlist + * @constructor + */ +function BibtexParser(arg0, allowedKeys) { + // Determine how this function is to be used + if (typeof arg0 === 'string') { + // Passed a string, synchronous call without 'new' + const entries = [] + function accumulator(entry) { + entries.push(entry) + } + const parser = new BibtexParser(accumulator, allowedKeys) + parser.parse(arg0) + return { + entries, + errors: parser.getErrors(), + } + } + if (typeof arg0 !== 'function') { + throw 'Invalid parser construction.' + } + this.ALLOWEDKEYS_ = allowedKeys || [] + this.reset_(arg0) + this.initMacros_() + return this +} + +/** @enum {number} */ +BibtexParser.prototype.STATES_ = { + ENTRY_OR_JUNK: 0, + OBJECT_TYPE: 1, + ENTRY_KEY: 2, + KV_KEY: 3, + EQUALS: 4, + KV_VALUE: 5, +} +BibtexParser.prototype.reset_ = function (arg0) { + /** @private */ this.DATA_ = {} + /** @private */ this.CALLBACK_ = arg0 + /** @private */ this.CHAR_ = 0 + /** @private */ this.LINE_ = 1 + /** @private */ this.CHAR_IN_LINE_ = 0 + /** @private */ this.SKIPWS_ = true + /** @private */ this.SKIPCOMMENT_ = true + /** @private */ this.SKIPKVPAIR_ = false + /** @private */ this.PARSETMP_ = {} + /** @private */ this.SKIPTILLEOL_ = false + /** @private */ this.VALBRACES_ = null + /** @private */ this.BRACETYPE_ = null + /** @private */ this.BRACECOUNT_ = 0 + /** @private */ this.STATE_ = this.STATES_.ENTRY_OR_JUNK + /** @private */ this.ERRORS_ = [] +} +/** @private */ BibtexParser.prototype.ENTRY_TYPES_ = { + inproceedings: 1, + proceedings: 2, + article: 3, + techreport: 4, + misc: 5, + mastersthesis: 6, + book: 7, + phdthesis: 8, + incollection: 9, + unpublished: 10, + inbook: 11, + manual: 12, + periodical: 13, + booklet: 14, + masterthesis: 15, + conference: 16, + /* additional fields from biblatex */ + artwork: 17, + audio: 18, + bibnote: 19, + bookinbook: 20, + collection: 21, + commentary: 22, + customa: 23, + customb: 24, + customc: 25, + customd: 26, + custome: 27, + customf: 28, + image: 29, + inreference: 30, + jurisdiction: 31, + legal: 32, + legislation: 33, + letter: 34, + movie: 35, + music: 36, + mvbook: 37, + mvcollection: 38, + mvproceedings: 39, + mvreference: 40, + online: 41, + patent: 42, + performance: 43, + reference: 44, + report: 45, + review: 46, + set: 47, + software: 48, + standard: 49, + suppbook: 50, + suppcollection: 51, + thesis: 52, + video: 53, +} +BibtexParser.prototype.initMacros_ = function () { + // macros can be extended by the user via + // @string { macroName = "macroValue" } + /** @private */ this.MACROS_ = { + jan: 'January', + feb: 'February', + mar: 'March', + apr: 'April', + may: 'May', + jun: 'June', + jul: 'July', + aug: 'August', + sep: 'September', + oct: 'October', + nov: 'November', + dec: 'December', + Jan: 'January', + Feb: 'February', + Mar: 'March', + Apr: 'April', + May: 'May', + Jun: 'June', + Jul: 'July', + Aug: 
'August', + Sep: 'September', + Oct: 'October', + Nov: 'November', + Dec: 'December', + } +} + +/** + * Gets an array of all errors encountered during parsing. + * Array entries are of the format: + * [ line number, character in line, character in stream, error text ] + * + * @returns Array + * @public + */ +BibtexParser.prototype.getErrors = function () { + return this.ERRORS_ +} + +/** + * Processes a chunk of data + * @public + */ +BibtexParser.prototype.parse = function (chunk) { + for (let i = 0; i < chunk.length; i++) this.processCharacter_(chunk[i]) +} + +/** + * Logs error at current stream position. + * + * @private + */ +BibtexParser.prototype.error_ = function (text) { + this.ERRORS_.push([this.LINE_, this.CHAR_IN_LINE_, this.CHAR_, text]) +} + +/** + * Called after an entire entry has been parsed from the stream. + * Performs post-processing and invokes the entry callback pointed to by + * this.CALLBACK_. Parsed (but unprocessed) entry data is in this.DATA_. + */ +BibtexParser.prototype.processEntry_ = function () { + const data = this.DATA_ + if (data.Fields) + for (const f in data.Fields) { + let raw = data.Fields[f] + + // Convert Latex/Bibtex special characters to UTF-8 equivalents + for (let i = 0; i < this.CHARCONV_.length; i++) { + const re = this.CHARCONV_[i][0] + const rep = this.CHARCONV_[i][1] + raw = raw.replace(re, rep) + } + + // Basic substitutions + raw = raw + .replace(/[\n\r\t]/g, ' ') + .replace(/\s\s+/g, ' ') + .replace(/^\s+|\s+$/g, '') + + // Remove braces and backslashes + const len = raw.length + let processedArr = [] + for (let i = 0; i < len; i++) { + let c = raw[i] + let skip = false + if (c == '\\' && i < len - 1) c = raw[++i] + else { + if (c == '{' || c == '}') skip = true + } + if (!skip) processedArr.push(c) + } + data.Fields[f] = processedArr.join('') + processedArr = null + } + + if (data.ObjectType == 'string') { + for (const f in data.Fields) { + this.MACROS_[f] = data.Fields[f] + } + } else { + // Parsed a new Bibtex entry + this.CALLBACK_(data) + } +} + +/** + * Processes next character in the stream, invoking the callback after + * each entry has been found and processed. + * + * @private + * @param {string} c Next character in input stream + */ +BibtexParser.prototype.processCharacter_ = function (c) { + // Housekeeping + this.CHAR_++ + this.CHAR_IN_LINE_++ + if (c == '\n') { + this.LINE_++ + this.CHAR_IN_LINE_ = 1 + } + + // Convenience states for skipping whitespace when needed + if (this.SKIPTILLEOL_) { + if (c == '\n') this.SKIPTILLEOL_ = false + return + } + if (this.SKIPCOMMENT_ && c == '%') { + this.SKIPTILLEOL_ = true + return + } + if (this.SKIPWS_ && /\s/.test(c)) return + this.SKIPWS_ = false + this.SKIPCOMMENT_ = false + this.SKIPTILLEOL_ = false + + // Main state machine + let AnotherIteration = true + while (AnotherIteration) { + // console.log(this.LINE_, this.CHAR_IN_LINE_, this.STATE_, c) + AnotherIteration = false + switch (this.STATE_) { + // -- Scan for an object marker ('@') + // -- Reset temporary data structure in case previous entry was garbled + case this.STATES_.ENTRY_OR_JUNK: + if (c == '@') { + // SUCCESS: Parsed a valid start-of-object marker. 
+ // NEXT_STATE: OBJECT_TYPE + this.STATE_ = this.STATES_.OBJECT_TYPE + this.DATA_ = { + ObjectType: '', + } + } + this.BRACETYPE_ = null + this.SKIPWS_ = true + this.SKIPCOMMENT_ = true + break + + // Start at first non-whitespace character after start-of-object '@' + // -- Accept [A-Za-z], break on non-matching character + // -- Populate this.DATA_.EntryType and this.DATA_.ObjectType + case this.STATES_.OBJECT_TYPE: + if (/[A-Za-z]/.test(c)) { + this.DATA_.ObjectType += c.toLowerCase() + this.SKIPWS_ = true + this.SKIPCOMMENT_ = true + } else { + // Break from state and validate object type + const ot = this.DATA_.ObjectType + if (ot == 'comment') { + this.STATE_ = this.STATES_.ENTRY_OR_JUNK + } else { + if (ot == 'string') { + this.DATA_.ObjectType = ot + this.DATA_.Fields = {} + this.BRACETYPE_ = c + this.BRACECOUNT_ = 1 + this.STATE_ = this.STATES_.KV_KEY + this.SKIPWS_ = true + this.SKIPCOMMENT_ = true + this.PARSETMP_ = { + Key: '', + } + } else { + if (ot == 'preamble') { + this.STATE_ = this.STATES_.ENTRY_OR_JUNK + } else { + if (ot in this.ENTRY_TYPES_) { + // SUCCESS: Parsed a valid object type. + // NEXT_STATE: ENTRY_KEY + this.DATA_.ObjectType = 'entry' + this.DATA_.EntryType = ot + this.DATA_.EntryKey = '' + this.STATE_ = this.STATES_.ENTRY_KEY + AnotherIteration = true + } else { + // ERROR: Unrecognized object type. + // NEXT_STATE: ENTRY_OR_JUNK + this.error_( + 'Unrecognized object type: "' + this.DATA_.ObjectType + '"' + ) + this.STATE_ = this.STATES_.ENTRY_OR_JUNK + } + } + } + } + } + break + + // Start at first non-alphabetic character after an entry type + // -- Populate this.DATA_.EntryKey + case this.STATES_.ENTRY_KEY: + if ((c === '{' || c === '(') && this.BRACETYPE_ == null) { + this.BRACETYPE_ = c + this.BRACECOUNT_ = 1 + this.SKIPWS_ = true + this.SKIPCOMMENT_ = true + break + } + if (/[,%\s]/.test(c)) { + if (this.DATA_.EntryKey.length < 1) { + // Skip comments and whitespace before entry key + this.SKIPWS_ = true + this.SKIPCOMMENT_ = true + } else { + if (this.BRACETYPE_ == null) { + // ERROR: No opening brace for object + // NEXT_STATE: ENTRY_OR_JUNK + this.error_('No opening brace for object.') + this.STATE_ = this.STATES_.ENTRY_OR_JUNK + } else { + // SUCCESS: Parsed an entry key + // NEXT_STATE: KV_KEY + this.SKIPWS_ = true + this.SKIPCOMMENT_ = true + AnotherIteration = true + this.STATE_ = this.STATES_.KV_KEY + this.PARSETMP_.Key = '' + this.DATA_.Fields = {} + } + } + } else { + this.DATA_.EntryKey += c + this.SKIPWS_ = false + this.SKIPCOMMENT_ = false + } + break + + // Start at first non-whitespace/comment character after entry key. 
+ // -- Populate this.PARSETMP_.Key + case this.STATES_.KV_KEY: + // Test for end of entry + if ( + (c == '}' && this.BRACETYPE_ == '{') || + (c == ')' && this.BRACETYPE_ == '(') + ) { + // SUCCESS: Parsed an entry, possibly incomplete + // NEXT_STATE: ENTRY_OR_JUNK + this.processEntry_() + this.SKIPWS_ = true + this.SKIPCOMMENT_ = true + this.STATE_ = this.STATES_.ENTRY_OR_JUNK + break + } + if (/[\-A-Za-z:]/.test(c)) { + // Add to key + this.PARSETMP_.Key += c + this.SKIPWS_ = false + this.SKIPCOMMENT_ = false + } else { + // Either end of key or we haven't encountered start of key + if (this.PARSETMP_.Key.length < 1) { + // Keep going till we see a key + this.SKIPWS_ = true + this.SKIPCOMMENT_ = true + } else { + // SUCCESS: Found full key in K/V pair + // NEXT_STATE: EQUALS + this.SKIPWS_ = true + this.SKIPCOMMENT_ = true + this.STATE_ = this.STATES_.EQUALS + AnotherIteration = true + + if (this.DATA_.ObjectType !== 'string') { + // this entry is not a macro + // normalize the key to lower case + this.PARSETMP_.Key = this.PARSETMP_.Key.toLowerCase() + + // optimization: skip key/value pairs that are not on the allowlist + this.SKIPKVPAIR_ = + // has allowedKeys set + this.ALLOWEDKEYS_.length && + // key is not on the allowlist + this.ALLOWEDKEYS_.indexOf(this.PARSETMP_.Key) === -1 + } else { + this.SKIPKVPAIR_ = false + } + } + } + break + + // Start at first non-alphabetic character after K/V pair key. + case this.STATES_.EQUALS: + if ( + (c == '}' && this.BRACETYPE_ == '{') || + (c == ')' && this.BRACETYPE_ == '(') + ) { + // ERROR: K/V pair with key but no value + // NEXT_STATE: ENTRY_OR_JUNK + this.error_( + 'Key-value pair has key "' + this.PARSETMP_.Key + '", but no value.' + ) + this.processEntry_() + this.SKIPWS_ = true + this.SKIPCOMMENT_ = true + this.STATE_ = this.STATES_.ENTRY_OR_JUNK + break + } + if (c == '=') { + // SUCCESS: found an equals sign separating key and value + // NEXT_STATE: KV_VALUE + this.SKIPWS_ = true + this.SKIPCOMMENT_ = true + this.STATE_ = this.STATES_.KV_VALUE + this.PARSETMP_.Value = [] + this.VALBRACES_ = { '"': [], '{': [] } + } + break + + // Start at first non-whitespace/comment character after '=' + // -- Populate this.PARSETMP_.Value + case this.STATES_.KV_VALUE: + const delim = this.VALBRACES_ + // valueCharsArray is the list of characters that make up the + // current value + const valueCharsArray = this.PARSETMP_.Value + let doneParsingValue = false + + // Test for special characters + if (c == '"' || c == '{' || c == '}' || c == ',') { + if (c == ',') { + // This comma can mean: + // (1) just another comma literal + // (2) end of a macro reference + if (delim['"'].length + delim['{'].length === 0) { + // end of a macro reference + const macro = this.PARSETMP_.Value.join('').trim() + if (macro in this.MACROS_) { + // Successful macro reference + this.PARSETMP_.Value = [this.MACROS_[macro]] + } else { + // Reference to an undefined macro + this.error_('Reference to an undefined macro: ' + macro) + } + doneParsingValue = true + } + } + if (c == '"') { + // This quote can mean: + // (1) opening delimiter + // (2) closing delimiter + // (3) literal, if we have a '{' on the stack + if (delim['"'].length + delim['{'].length === 0) { + // opening delimiter + delim['"'].push(this.CHAR_) + this.SKIPWS_ = false + this.SKIPCOMMENT_ = false + break + } + if ( + delim['"'].length == 1 && + delim['{'].length == 0 && + (valueCharsArray.length == 0 || + valueCharsArray[valueCharsArray.length - 1] != '\\') + ) { + // closing delimiter + doneParsingValue = 
true + } else { + // literal, add to value + } + } + if (c == '{') { + // This brace can mean: + // (1) opening delimiter + // (2) stacked verbatim delimiter + if ( + valueCharsArray.length == 0 || + valueCharsArray[valueCharsArray.length - 1] != '\\' + ) { + delim['{'].push(this.CHAR_) + this.SKIPWS_ = false + this.SKIPCOMMENT_ = false + } else { + // literal, add to value + } + } + if (c == '}') { + // This brace can mean: + // (1) closing delimiter + // (2) closing stacked verbatim delimiter + // (3) end of object definition if value was a macro + if (delim['"'].length + delim['{'].length === 0) { + // end of object definition, after macro + const macro = this.PARSETMP_.Value.join('').trim() + if (macro in this.MACROS_) { + // Successful macro reference + this.PARSETMP_.Value = [this.MACROS_[macro]] + } else { + // Reference to an undefined macro + this.error_('Reference to an undefined macro: ' + macro) + } + AnotherIteration = true + doneParsingValue = true + } else { + // sometimes imported bibs will have {\},{\\}, {\\\}, {\\\\}, etc for whitespace, + // which would otherwise break the parsing. we watch for these occurrences of + // 1+ backslashes in an empty bracket pair to gracefully handle the malformed bib file + const doubleSlash = + valueCharsArray.length >= 2 && + valueCharsArray[valueCharsArray.length - 1] === '\\' && // for \\} + valueCharsArray[valueCharsArray.length - 2] === '\\' + const singleSlash = + valueCharsArray.length >= 2 && + valueCharsArray[valueCharsArray.length - 1] === '\\' && // for {\} + valueCharsArray[valueCharsArray.length - 2] === '{' + + if ( + valueCharsArray.length == 0 || + valueCharsArray[valueCharsArray.length - 1] != '\\' || // for } + doubleSlash || + singleSlash + ) { + if (delim['{'].length > 0) { + // pop stack for stacked verbatim delimiter + delim['{'].splice(delim['{'].length - 1, 1) + if (delim['{'].length + delim['"'].length == 0) { + // closing delimiter + doneParsingValue = true + } else { + // end verbatim block + } + } + } else { + // literal, add to value + } + } + } + } + + // If here, then we are either done parsing the value or + // have a literal that should be added to the value. 
+ if (doneParsingValue) { + // SUCCESS: value parsed + // NEXT_STATE: KV_KEY + this.SKIPWS_ = true + this.SKIPCOMMENT_ = true + this.STATE_ = this.STATES_.KV_KEY + if (!this.SKIPKVPAIR_) { + this.DATA_.Fields[this.PARSETMP_.Key] = + this.PARSETMP_.Value.join('') + } + this.PARSETMP_ = { Key: '' } + this.VALBRACES_ = null + } else { + this.PARSETMP_.Value.push(c) + if (this.PARSETMP_.Value.length >= 1000 * 20) { + this.PARSETMP_.Value = [] + this.STATE_ = this.STATES_.ENTRY_OR_JUNK + this.DATA_ = { ObjectType: '' } + this.BRACETYPE_ = null + this.SKIPWS_ = true + this.SKIPCOMMENT_ = true + } + } + break + } // end switch (this.STATE_) + } // end while(AnotherIteration) +} // end function processCharacter + +/** @private */ BibtexParser.prototype.CHARCONV_ = [ + [/\\space /g, '\u0020'], + [/\\textdollar /g, '\u0024'], + [/\\textquotesingle /g, '\u0027'], + [/\\ast /g, '\u002A'], + [/\\textbackslash /g, '\u005C'], + [/\\\^\{\}/g, '\u005E'], + [/\\textasciigrave /g, '\u0060'], + [/\\lbrace /g, '\u007B'], + [/\\vert /g, '\u007C'], + [/\\rbrace /g, '\u007D'], + [/\\textasciitilde /g, '\u007E'], + [/\\textexclamdown /g, '\u00A1'], + [/\\textcent /g, '\u00A2'], + [/\\textsterling /g, '\u00A3'], + [/\\textcurrency /g, '\u00A4'], + [/\\textyen /g, '\u00A5'], + [/\\textbrokenbar /g, '\u00A6'], + [/\\textsection /g, '\u00A7'], + [/\\textasciidieresis /g, '\u00A8'], + [/\\textcopyright /g, '\u00A9'], + [/\\textordfeminine /g, '\u00AA'], + [/\\guillemotleft /g, '\u00AB'], + [/\\lnot /g, '\u00AC'], + [/\\textregistered /g, '\u00AE'], + [/\\textasciimacron /g, '\u00AF'], + [/\\textdegree /g, '\u00B0'], + [/\\pm /g, '\u00B1'], + [/\\textasciiacute /g, '\u00B4'], + [/\\mathrm\{\\mu\}/g, '\u00B5'], + [/\\textparagraph /g, '\u00B6'], + [/\\cdot /g, '\u00B7'], + [/\\c\{\}/g, '\u00B8'], + [/\\textordmasculine /g, '\u00BA'], + [/\\guillemotright /g, '\u00BB'], + [/\\textonequarter /g, '\u00BC'], + [/\\textonehalf /g, '\u00BD'], + [/\\textthreequarters /g, '\u00BE'], + [/\\textquestiondown /g, '\u00BF'], + [/\\`\{A\}/g, '\u00C0'], + [/\\'\{A\}/g, '\u00C1'], + [/\\\^\{A\}/g, '\u00C2'], + [/\\~\{A\}/g, '\u00C3'], + [/\\"\{A\}/g, '\u00C4'], + [/\\AA /g, '\u00C5'], + [/\\AE /g, '\u00C6'], + [/\\c\{C\}/g, '\u00C7'], + [/\\`\{E\}/g, '\u00C8'], + [/\\'\{E\}/g, '\u00C9'], + [/\\\^\{E\}/g, '\u00CA'], + [/\\"\{E\}/g, '\u00CB'], + [/\\`\{I\}/g, '\u00CC'], + [/\\'\{I\}/g, '\u00CD'], + [/\\\^\{I\}/g, '\u00CE'], + [/\\"\{I\}/g, '\u00CF'], + [/\\DH /g, '\u00D0'], + [/\\~\{N\}/g, '\u00D1'], + [/\\`\{O\}/g, '\u00D2'], + [/\\'\{O\}/g, '\u00D3'], + [/\\\^\{O\}/g, '\u00D4'], + [/\\~\{O\}/g, '\u00D5'], + [/\\"\{O\}/g, '\u00D6'], + [/\\texttimes /g, '\u00D7'], + [/\\O /g, '\u00D8'], + [/\\`\{U\}/g, '\u00D9'], + [/\\'\{U\}/g, '\u00DA'], + [/\\\^\{U\}/g, '\u00DB'], + [/\\"\{U\}/g, '\u00DC'], + [/\\'\{Y\}/g, '\u00DD'], + [/\\TH /g, '\u00DE'], + [/\\ss /g, '\u00DF'], + [/\\`\{a\}/g, '\u00E0'], + [/\\'\{a\}/g, '\u00E1'], + [/\\\^\{a\}/g, '\u00E2'], + [/\\~\{a\}/g, '\u00E3'], + [/\\"\{a\}/g, '\u00E4'], + [/\\aa /g, '\u00E5'], + [/\\ae /g, '\u00E6'], + [/\\c\{c\}/g, '\u00E7'], + [/\\`\{e\}/g, '\u00E8'], + [/\\'\{e\}/g, '\u00E9'], + [/\\\^\{e\}/g, '\u00EA'], + [/\\"\{e\}/g, '\u00EB'], + [/\\`\{\\i\}/g, '\u00EC'], + [/\\'\{\\i\}/g, '\u00ED'], + [/\\\^\{\\i\}/g, '\u00EE'], + [/\\"\{\\i\}/g, '\u00EF'], + [/\\dh /g, '\u00F0'], + [/\\~\{n\}/g, '\u00F1'], + [/\\`\{o\}/g, '\u00F2'], + [/\\'\{o\}/g, '\u00F3'], + [/\\\^\{o\}/g, '\u00F4'], + [/\\~\{o\}/g, '\u00F5'], + [/\\"\{o\}/g, '\u00F6'], + [/\\div /g, '\u00F7'], + [/\\o /g, '\u00F8'], + 
[/\\`\{u\}/g, '\u00F9'], + [/\\'\{u\}/g, '\u00FA'], + [/\\\^\{u\}/g, '\u00FB'], + [/\\"\{u\}/g, '\u00FC'], + [/\\'\{y\}/g, '\u00FD'], + [/\\th /g, '\u00FE'], + [/\\"\{y\}/g, '\u00FF'], + [/\\=\{A\}/g, '\u0100'], + [/\\=\{a\}/g, '\u0101'], + [/\\u\{A\}/g, '\u0102'], + [/\\u\{a\}/g, '\u0103'], + [/\\k\{A\}/g, '\u0104'], + [/\\k\{a\}/g, '\u0105'], + [/\\'\{C\}/g, '\u0106'], + [/\\'\{c\}/g, '\u0107'], + [/\\\^\{C\}/g, '\u0108'], + [/\\\^\{c\}/g, '\u0109'], + [/\\.\{C\}/g, '\u010A'], + [/\\.\{c\}/g, '\u010B'], + [/\\v\{C\}/g, '\u010C'], + [/\\v\{c\}/g, '\u010D'], + [/\\v\{D\}/g, '\u010E'], + [/\\v\{d\}/g, '\u010F'], + [/\\DJ /g, '\u0110'], + [/\\dj /g, '\u0111'], + [/\\=\{E\}/g, '\u0112'], + [/\\=\{e\}/g, '\u0113'], + [/\\u\{E\}/g, '\u0114'], + [/\\u\{e\}/g, '\u0115'], + [/\\.\{E\}/g, '\u0116'], + [/\\.\{e\}/g, '\u0117'], + [/\\k\{E\}/g, '\u0118'], + [/\\k\{e\}/g, '\u0119'], + [/\\v\{E\}/g, '\u011A'], + [/\\v\{e\}/g, '\u011B'], + [/\\\^\{G\}/g, '\u011C'], + [/\\\^\{g\}/g, '\u011D'], + [/\\u\{G\}/g, '\u011E'], + [/\\u\{g\}/g, '\u011F'], + [/\\.\{G\}/g, '\u0120'], + [/\\.\{g\}/g, '\u0121'], + [/\\c\{G\}/g, '\u0122'], + [/\\c\{g\}/g, '\u0123'], + [/\\\^\{H\}/g, '\u0124'], + [/\\\^\{h\}/g, '\u0125'], + [/\\Elzxh /g, '\u0127'], + [/\\~\{I\}/g, '\u0128'], + [/\\~\{\\i\}/g, '\u0129'], + [/\\=\{I\}/g, '\u012A'], + [/\\=\{\\i\}/g, '\u012B'], + [/\\u\{I\}/g, '\u012C'], + [/\\u\{\\i\}/g, '\u012D'], + [/\\k\{I\}/g, '\u012E'], + [/\\k\{i\}/g, '\u012F'], + [/\\.\{I\}/g, '\u0130'], + [/\\i /g, '\u0131'], + [/\\\^\{J\}/g, '\u0134'], + [/\\\^\{\\j\}/g, '\u0135'], + [/\\c\{K\}/g, '\u0136'], + [/\\c\{k\}/g, '\u0137'], + [/\\'\{L\}/g, '\u0139'], + [/\\'\{l\}/g, '\u013A'], + [/\\c\{L\}/g, '\u013B'], + [/\\c\{l\}/g, '\u013C'], + [/\\v\{L\}/g, '\u013D'], + [/\\v\{l\}/g, '\u013E'], + [/\\L /g, '\u0141'], + [/\\l /g, '\u0142'], + [/\\'\{N\}/g, '\u0143'], + [/\\'\{n\}/g, '\u0144'], + [/\\c\{N\}/g, '\u0145'], + [/\\c\{n\}/g, '\u0146'], + [/\\v\{N\}/g, '\u0147'], + [/\\v\{n\}/g, '\u0148'], + [/\\NG /g, '\u014A'], + [/\\ng /g, '\u014B'], + [/\\=\{O\}/g, '\u014C'], + [/\\=\{o\}/g, '\u014D'], + [/\\u\{O\}/g, '\u014E'], + [/\\u\{o\}/g, '\u014F'], + [/\\H\{O\}/g, '\u0150'], + [/\\H\{o\}/g, '\u0151'], + [/\\OE /g, '\u0152'], + [/\\oe /g, '\u0153'], + [/\\'\{R\}/g, '\u0154'], + [/\\'\{r\}/g, '\u0155'], + [/\\c\{R\}/g, '\u0156'], + [/\\c\{r\}/g, '\u0157'], + [/\\v\{R\}/g, '\u0158'], + [/\\v\{r\}/g, '\u0159'], + [/\\'\{S\}/g, '\u015A'], + [/\\'\{s\}/g, '\u015B'], + [/\\\^\{S\}/g, '\u015C'], + [/\\\^\{s\}/g, '\u015D'], + [/\\c\{S\}/g, '\u015E'], + [/\\c\{s\}/g, '\u015F'], + [/\\v\{S\}/g, '\u0160'], + [/\\v\{s\}/g, '\u0161'], + [/\\c\{T\}/g, '\u0162'], + [/\\c\{t\}/g, '\u0163'], + [/\\v\{T\}/g, '\u0164'], + [/\\v\{t\}/g, '\u0165'], + [/\\~\{U\}/g, '\u0168'], + [/\\~\{u\}/g, '\u0169'], + [/\\=\{U\}/g, '\u016A'], + [/\\=\{u\}/g, '\u016B'], + [/\\u\{U\}/g, '\u016C'], + [/\\u\{u\}/g, '\u016D'], + [/\\r\{U\}/g, '\u016E'], + [/\\r\{u\}/g, '\u016F'], + [/\\H\{U\}/g, '\u0170'], + [/\\H\{u\}/g, '\u0171'], + [/\\k\{U\}/g, '\u0172'], + [/\\k\{u\}/g, '\u0173'], + [/\\\^\{W\}/g, '\u0174'], + [/\\\^\{w\}/g, '\u0175'], + [/\\\^\{Y\}/g, '\u0176'], + [/\\\^\{y\}/g, '\u0177'], + [/\\"\{Y\}/g, '\u0178'], + [/\\'\{Z\}/g, '\u0179'], + [/\\'\{z\}/g, '\u017A'], + [/\\.\{Z\}/g, '\u017B'], + [/\\.\{z\}/g, '\u017C'], + [/\\v\{Z\}/g, '\u017D'], + [/\\v\{z\}/g, '\u017E'], + [/\\texthvlig /g, '\u0195'], + [/\\textnrleg /g, '\u019E'], + [/\\eth /g, '\u01AA'], + [/\\textdoublepipe /g, '\u01C2'], + [/\\'\{g\}/g, '\u01F5'], + [/\\Elztrna /g, '\u0250'], + 
[/\\Elztrnsa /g, '\u0252'], + [/\\Elzopeno /g, '\u0254'], + [/\\Elzrtld /g, '\u0256'], + [/\\Elzschwa /g, '\u0259'], + [/\\varepsilon /g, '\u025B'], + [/\\Elzpgamma /g, '\u0263'], + [/\\Elzpbgam /g, '\u0264'], + [/\\Elztrnh /g, '\u0265'], + [/\\Elzbtdl /g, '\u026C'], + [/\\Elzrtll /g, '\u026D'], + [/\\Elztrnm /g, '\u026F'], + [/\\Elztrnmlr /g, '\u0270'], + [/\\Elzltlmr /g, '\u0271'], + [/\\Elzltln /g, '\u0272'], + [/\\Elzrtln /g, '\u0273'], + [/\\Elzclomeg /g, '\u0277'], + [/\\textphi /g, '\u0278'], + [/\\Elztrnr /g, '\u0279'], + [/\\Elztrnrl /g, '\u027A'], + [/\\Elzrttrnr /g, '\u027B'], + [/\\Elzrl /g, '\u027C'], + [/\\Elzrtlr /g, '\u027D'], + [/\\Elzfhr /g, '\u027E'], + [/\\Elzrtls /g, '\u0282'], + [/\\Elzesh /g, '\u0283'], + [/\\Elztrnt /g, '\u0287'], + [/\\Elzrtlt /g, '\u0288'], + [/\\Elzpupsil /g, '\u028A'], + [/\\Elzpscrv /g, '\u028B'], + [/\\Elzinvv /g, '\u028C'], + [/\\Elzinvw /g, '\u028D'], + [/\\Elztrny /g, '\u028E'], + [/\\Elzrtlz /g, '\u0290'], + [/\\Elzyogh /g, '\u0292'], + [/\\Elzglst /g, '\u0294'], + [/\\Elzreglst /g, '\u0295'], + [/\\Elzinglst /g, '\u0296'], + [/\\textturnk /g, '\u029E'], + [/\\Elzdyogh /g, '\u02A4'], + [/\\Elztesh /g, '\u02A7'], + [/\\textasciicaron /g, '\u02C7'], + [/\\Elzverts /g, '\u02C8'], + [/\\Elzverti /g, '\u02CC'], + [/\\Elzlmrk /g, '\u02D0'], + [/\\Elzhlmrk /g, '\u02D1'], + [/\\Elzsbrhr /g, '\u02D2'], + [/\\Elzsblhr /g, '\u02D3'], + [/\\Elzrais /g, '\u02D4'], + [/\\Elzlow /g, '\u02D5'], + [/\\textasciibreve /g, '\u02D8'], + [/\\textperiodcentered /g, '\u02D9'], + [/\\r\{\}/g, '\u02DA'], + [/\\k\{\}/g, '\u02DB'], + [/\\texttildelow /g, '\u02DC'], + [/\\H\{\}/g, '\u02DD'], + [/\\tone\{55\}/g, '\u02E5'], + [/\\tone\{44\}/g, '\u02E6'], + [/\\tone\{33\}/g, '\u02E7'], + [/\\tone\{22\}/g, '\u02E8'], + [/\\tone\{11\}/g, '\u02E9'], + [/\\cyrchar\\C/g, '\u030F'], + [/\\Elzpalh /g, '\u0321'], + [/\\Elzrh /g, '\u0322'], + [/\\Elzsbbrg /g, '\u032A'], + [/\\Elzxl /g, '\u0335'], + [/\\Elzbar /g, '\u0336'], + [/\\'\{A\}/g, '\u0386'], + [/\\'\{E\}/g, '\u0388'], + [/\\'\{H\}/g, '\u0389'], + [/\\'\{\}\{I\}/g, '\u038A'], + [/\\'\{\}O/g, '\u038C'], + [/\\mathrm\{'Y\}/g, '\u038E'], + [/\\mathrm\{'\\Omega\}/g, '\u038F'], + [/\\acute\{\\ddot\{\\iota\}\}/g, '\u0390'], + [/\\Alpha /g, '\u0391'], + [/\\Beta /g, '\u0392'], + [/\\Gamma /g, '\u0393'], + [/\\Delta /g, '\u0394'], + [/\\Epsilon /g, '\u0395'], + [/\\Zeta /g, '\u0396'], + [/\\Eta /g, '\u0397'], + [/\\Theta /g, '\u0398'], + [/\\Iota /g, '\u0399'], + [/\\Kappa /g, '\u039A'], + [/\\Lambda /g, '\u039B'], + [/\\Xi /g, '\u039E'], + [/\\Pi /g, '\u03A0'], + [/\\Rho /g, '\u03A1'], + [/\\Sigma /g, '\u03A3'], + [/\\Tau /g, '\u03A4'], + [/\\Upsilon /g, '\u03A5'], + [/\\Phi /g, '\u03A6'], + [/\\Chi /g, '\u03A7'], + [/\\Psi /g, '\u03A8'], + [/\\Omega /g, '\u03A9'], + [/\\mathrm\{\\ddot\{I\}\}/g, '\u03AA'], + [/\\mathrm\{\\ddot\{Y\}\}/g, '\u03AB'], + [/\\'\{\$\\alpha\$\}/g, '\u03AC'], + [/\\acute\{\\epsilon\}/g, '\u03AD'], + [/\\acute\{\\eta\}/g, '\u03AE'], + [/\\acute\{\\iota\}/g, '\u03AF'], + [/\\acute\{\\ddot\{\\upsilon\}\}/g, '\u03B0'], + [/\\alpha /g, '\u03B1'], + [/\\beta /g, '\u03B2'], + [/\\gamma /g, '\u03B3'], + [/\\delta /g, '\u03B4'], + [/\\epsilon /g, '\u03B5'], + [/\\zeta /g, '\u03B6'], + [/\\eta /g, '\u03B7'], + [/\\texttheta /g, '\u03B8'], + [/\\iota /g, '\u03B9'], + [/\\kappa /g, '\u03BA'], + [/\\lambda /g, '\u03BB'], + [/\\mu /g, '\u03BC'], + [/\\nu /g, '\u03BD'], + [/\\xi /g, '\u03BE'], + [/\\pi /g, '\u03C0'], + [/\\rho /g, '\u03C1'], + [/\\varsigma /g, '\u03C2'], + [/\\sigma /g, '\u03C3'], + [/\\tau /g, 
'\u03C4'], + [/\\upsilon /g, '\u03C5'], + [/\\varphi /g, '\u03C6'], + [/\\chi /g, '\u03C7'], + [/\\psi /g, '\u03C8'], + [/\\omega /g, '\u03C9'], + [/\\ddot\{\\iota\}/g, '\u03CA'], + [/\\ddot\{\\upsilon\}/g, '\u03CB'], + [/\\'\{o\}/g, '\u03CC'], + [/\\acute\{\\upsilon\}/g, '\u03CD'], + [/\\acute\{\\omega\}/g, '\u03CE'], + [/\\Pisymbol\{ppi022\}\{87\}/g, '\u03D0'], + [/\\textvartheta /g, '\u03D1'], + [/\\Upsilon /g, '\u03D2'], + [/\\phi /g, '\u03D5'], + [/\\varpi /g, '\u03D6'], + [/\\Stigma /g, '\u03DA'], + [/\\Digamma /g, '\u03DC'], + [/\\digamma /g, '\u03DD'], + [/\\Koppa /g, '\u03DE'], + [/\\Sampi /g, '\u03E0'], + [/\\varkappa /g, '\u03F0'], + [/\\varrho /g, '\u03F1'], + [/\\textTheta /g, '\u03F4'], + [/\\backepsilon /g, '\u03F6'], + [/\\cyrchar\\CYRYO /g, '\u0401'], + [/\\cyrchar\\CYRDJE /g, '\u0402'], + [/\\cyrchar\{\\'\\CYRG\}/g, '\u0403'], + [/\\cyrchar\\CYRIE /g, '\u0404'], + [/\\cyrchar\\CYRDZE /g, '\u0405'], + [/\\cyrchar\\CYRII /g, '\u0406'], + [/\\cyrchar\\CYRYI /g, '\u0407'], + [/\\cyrchar\\CYRJE /g, '\u0408'], + [/\\cyrchar\\CYRLJE /g, '\u0409'], + [/\\cyrchar\\CYRNJE /g, '\u040A'], + [/\\cyrchar\\CYRTSHE /g, '\u040B'], + [/\\cyrchar\{\\'\\CYRK\}/g, '\u040C'], + [/\\cyrchar\\CYRUSHRT /g, '\u040E'], + [/\\cyrchar\\CYRDZHE /g, '\u040F'], + [/\\cyrchar\\CYRA /g, '\u0410'], + [/\\cyrchar\\CYRB /g, '\u0411'], + [/\\cyrchar\\CYRV /g, '\u0412'], + [/\\cyrchar\\CYRG /g, '\u0413'], + [/\\cyrchar\\CYRD /g, '\u0414'], + [/\\cyrchar\\CYRE /g, '\u0415'], + [/\\cyrchar\\CYRZH /g, '\u0416'], + [/\\cyrchar\\CYRZ /g, '\u0417'], + [/\\cyrchar\\CYRI /g, '\u0418'], + [/\\cyrchar\\CYRISHRT /g, '\u0419'], + [/\\cyrchar\\CYRK /g, '\u041A'], + [/\\cyrchar\\CYRL /g, '\u041B'], + [/\\cyrchar\\CYRM /g, '\u041C'], + [/\\cyrchar\\CYRN /g, '\u041D'], + [/\\cyrchar\\CYRO /g, '\u041E'], + [/\\cyrchar\\CYRP /g, '\u041F'], + [/\\cyrchar\\CYRR /g, '\u0420'], + [/\\cyrchar\\CYRS /g, '\u0421'], + [/\\cyrchar\\CYRT /g, '\u0422'], + [/\\cyrchar\\CYRU /g, '\u0423'], + [/\\cyrchar\\CYRF /g, '\u0424'], + [/\\cyrchar\\CYRH /g, '\u0425'], + [/\\cyrchar\\CYRC /g, '\u0426'], + [/\\cyrchar\\CYRCH /g, '\u0427'], + [/\\cyrchar\\CYRSH /g, '\u0428'], + [/\\cyrchar\\CYRSHCH /g, '\u0429'], + [/\\cyrchar\\CYRHRDSN /g, '\u042A'], + [/\\cyrchar\\CYRERY /g, '\u042B'], + [/\\cyrchar\\CYRSFTSN /g, '\u042C'], + [/\\cyrchar\\CYREREV /g, '\u042D'], + [/\\cyrchar\\CYRYU /g, '\u042E'], + [/\\cyrchar\\CYRYA /g, '\u042F'], + [/\\cyrchar\\cyra /g, '\u0430'], + [/\\cyrchar\\cyrb /g, '\u0431'], + [/\\cyrchar\\cyrv /g, '\u0432'], + [/\\cyrchar\\cyrg /g, '\u0433'], + [/\\cyrchar\\cyrd /g, '\u0434'], + [/\\cyrchar\\cyre /g, '\u0435'], + [/\\cyrchar\\cyrzh /g, '\u0436'], + [/\\cyrchar\\cyrz /g, '\u0437'], + [/\\cyrchar\\cyri /g, '\u0438'], + [/\\cyrchar\\cyrishrt /g, '\u0439'], + [/\\cyrchar\\cyrk /g, '\u043A'], + [/\\cyrchar\\cyrl /g, '\u043B'], + [/\\cyrchar\\cyrm /g, '\u043C'], + [/\\cyrchar\\cyrn /g, '\u043D'], + [/\\cyrchar\\cyro /g, '\u043E'], + [/\\cyrchar\\cyrp /g, '\u043F'], + [/\\cyrchar\\cyrr /g, '\u0440'], + [/\\cyrchar\\cyrs /g, '\u0441'], + [/\\cyrchar\\cyrt /g, '\u0442'], + [/\\cyrchar\\cyru /g, '\u0443'], + [/\\cyrchar\\cyrf /g, '\u0444'], + [/\\cyrchar\\cyrh /g, '\u0445'], + [/\\cyrchar\\cyrc /g, '\u0446'], + [/\\cyrchar\\cyrch /g, '\u0447'], + [/\\cyrchar\\cyrsh /g, '\u0448'], + [/\\cyrchar\\cyrshch /g, '\u0449'], + [/\\cyrchar\\cyrhrdsn /g, '\u044A'], + [/\\cyrchar\\cyrery /g, '\u044B'], + [/\\cyrchar\\cyrsftsn /g, '\u044C'], + [/\\cyrchar\\cyrerev /g, '\u044D'], + [/\\cyrchar\\cyryu /g, '\u044E'], + [/\\cyrchar\\cyrya /g, 
'\u044F'], + [/\\cyrchar\\cyryo /g, '\u0451'], + [/\\cyrchar\\cyrdje /g, '\u0452'], + [/\\cyrchar\{\\'\\cyrg\}/g, '\u0453'], + [/\\cyrchar\\cyrie /g, '\u0454'], + [/\\cyrchar\\cyrdze /g, '\u0455'], + [/\\cyrchar\\cyrii /g, '\u0456'], + [/\\cyrchar\\cyryi /g, '\u0457'], + [/\\cyrchar\\cyrje /g, '\u0458'], + [/\\cyrchar\\cyrlje /g, '\u0459'], + [/\\cyrchar\\cyrnje /g, '\u045A'], + [/\\cyrchar\\cyrtshe /g, '\u045B'], + [/\\cyrchar\{\\'\\cyrk\}/g, '\u045C'], + [/\\cyrchar\\cyrushrt /g, '\u045E'], + [/\\cyrchar\\cyrdzhe /g, '\u045F'], + [/\\cyrchar\\CYROMEGA /g, '\u0460'], + [/\\cyrchar\\cyromega /g, '\u0461'], + [/\\cyrchar\\CYRYAT /g, '\u0462'], + [/\\cyrchar\\CYRIOTE /g, '\u0464'], + [/\\cyrchar\\cyriote /g, '\u0465'], + [/\\cyrchar\\CYRLYUS /g, '\u0466'], + [/\\cyrchar\\cyrlyus /g, '\u0467'], + [/\\cyrchar\\CYRIOTLYUS /g, '\u0468'], + [/\\cyrchar\\cyriotlyus /g, '\u0469'], + [/\\cyrchar\\CYRBYUS /g, '\u046A'], + [/\\cyrchar\\CYRIOTBYUS /g, '\u046C'], + [/\\cyrchar\\cyriotbyus /g, '\u046D'], + [/\\cyrchar\\CYRKSI /g, '\u046E'], + [/\\cyrchar\\cyrksi /g, '\u046F'], + [/\\cyrchar\\CYRPSI /g, '\u0470'], + [/\\cyrchar\\cyrpsi /g, '\u0471'], + [/\\cyrchar\\CYRFITA /g, '\u0472'], + [/\\cyrchar\\CYRIZH /g, '\u0474'], + [/\\cyrchar\\CYRUK /g, '\u0478'], + [/\\cyrchar\\cyruk /g, '\u0479'], + [/\\cyrchar\\CYROMEGARND /g, '\u047A'], + [/\\cyrchar\\cyromegarnd /g, '\u047B'], + [/\\cyrchar\\CYROMEGATITLO /g, '\u047C'], + [/\\cyrchar\\cyromegatitlo /g, '\u047D'], + [/\\cyrchar\\CYROT /g, '\u047E'], + [/\\cyrchar\\cyrot /g, '\u047F'], + [/\\cyrchar\\CYRKOPPA /g, '\u0480'], + [/\\cyrchar\\cyrkoppa /g, '\u0481'], + [/\\cyrchar\\cyrthousands /g, '\u0482'], + [/\\cyrchar\\cyrhundredthousands /g, '\u0488'], + [/\\cyrchar\\cyrmillions /g, '\u0489'], + [/\\cyrchar\\CYRSEMISFTSN /g, '\u048C'], + [/\\cyrchar\\cyrsemisftsn /g, '\u048D'], + [/\\cyrchar\\CYRRTICK /g, '\u048E'], + [/\\cyrchar\\cyrrtick /g, '\u048F'], + [/\\cyrchar\\CYRGUP /g, '\u0490'], + [/\\cyrchar\\cyrgup /g, '\u0491'], + [/\\cyrchar\\CYRGHCRS /g, '\u0492'], + [/\\cyrchar\\cyrghcrs /g, '\u0493'], + [/\\cyrchar\\CYRGHK /g, '\u0494'], + [/\\cyrchar\\cyrghk /g, '\u0495'], + [/\\cyrchar\\CYRZHDSC /g, '\u0496'], + [/\\cyrchar\\cyrzhdsc /g, '\u0497'], + [/\\cyrchar\\CYRZDSC /g, '\u0498'], + [/\\cyrchar\\cyrzdsc /g, '\u0499'], + [/\\cyrchar\\CYRKDSC /g, '\u049A'], + [/\\cyrchar\\cyrkdsc /g, '\u049B'], + [/\\cyrchar\\CYRKVCRS /g, '\u049C'], + [/\\cyrchar\\cyrkvcrs /g, '\u049D'], + [/\\cyrchar\\CYRKHCRS /g, '\u049E'], + [/\\cyrchar\\cyrkhcrs /g, '\u049F'], + [/\\cyrchar\\CYRKBEAK /g, '\u04A0'], + [/\\cyrchar\\cyrkbeak /g, '\u04A1'], + [/\\cyrchar\\CYRNDSC /g, '\u04A2'], + [/\\cyrchar\\cyrndsc /g, '\u04A3'], + [/\\cyrchar\\CYRNG /g, '\u04A4'], + [/\\cyrchar\\cyrng /g, '\u04A5'], + [/\\cyrchar\\CYRPHK /g, '\u04A6'], + [/\\cyrchar\\cyrphk /g, '\u04A7'], + [/\\cyrchar\\CYRABHHA /g, '\u04A8'], + [/\\cyrchar\\cyrabhha /g, '\u04A9'], + [/\\cyrchar\\CYRSDSC /g, '\u04AA'], + [/\\cyrchar\\cyrsdsc /g, '\u04AB'], + [/\\cyrchar\\CYRTDSC /g, '\u04AC'], + [/\\cyrchar\\cyrtdsc /g, '\u04AD'], + [/\\cyrchar\\CYRY /g, '\u04AE'], + [/\\cyrchar\\cyry /g, '\u04AF'], + [/\\cyrchar\\CYRYHCRS /g, '\u04B0'], + [/\\cyrchar\\cyryhcrs /g, '\u04B1'], + [/\\cyrchar\\CYRHDSC /g, '\u04B2'], + [/\\cyrchar\\cyrhdsc /g, '\u04B3'], + [/\\cyrchar\\CYRTETSE /g, '\u04B4'], + [/\\cyrchar\\cyrtetse /g, '\u04B5'], + [/\\cyrchar\\CYRCHRDSC /g, '\u04B6'], + [/\\cyrchar\\cyrchrdsc /g, '\u04B7'], + [/\\cyrchar\\CYRCHVCRS /g, '\u04B8'], + [/\\cyrchar\\cyrchvcrs /g, '\u04B9'], + [/\\cyrchar\\CYRSHHA /g, 
'\u04BA'], + [/\\cyrchar\\cyrshha /g, '\u04BB'], + [/\\cyrchar\\CYRABHCH /g, '\u04BC'], + [/\\cyrchar\\cyrabhch /g, '\u04BD'], + [/\\cyrchar\\CYRABHCHDSC /g, '\u04BE'], + [/\\cyrchar\\cyrabhchdsc /g, '\u04BF'], + [/\\cyrchar\\CYRpalochka /g, '\u04C0'], + [/\\cyrchar\\CYRKHK /g, '\u04C3'], + [/\\cyrchar\\cyrkhk /g, '\u04C4'], + [/\\cyrchar\\CYRNHK /g, '\u04C7'], + [/\\cyrchar\\cyrnhk /g, '\u04C8'], + [/\\cyrchar\\CYRCHLDSC /g, '\u04CB'], + [/\\cyrchar\\cyrchldsc /g, '\u04CC'], + [/\\cyrchar\\CYRAE /g, '\u04D4'], + [/\\cyrchar\\cyrae /g, '\u04D5'], + [/\\cyrchar\\CYRSCHWA /g, '\u04D8'], + [/\\cyrchar\\cyrschwa /g, '\u04D9'], + [/\\cyrchar\\CYRABHDZE /g, '\u04E0'], + [/\\cyrchar\\cyrabhdze /g, '\u04E1'], + [/\\cyrchar\\CYROTLD /g, '\u04E8'], + [/\\cyrchar\\cyrotld /g, '\u04E9'], + [/\\hspace\{0.6em\}/g, '\u2002'], + [/\\hspace\{1em\}/g, '\u2003'], + [/\\hspace\{0.33em\}/g, '\u2004'], + [/\\hspace\{0.25em\}/g, '\u2005'], + [/\\hspace\{0.166em\}/g, '\u2006'], + [/\\hphantom\{0\}/g, '\u2007'], + [/\\hphantom\{,\}/g, '\u2008'], + [/\\hspace\{0.167em\}/g, '\u2009'], + [/\\mkern1mu /g, '\u200A'], + [/\\textendash /g, '\u2013'], + [/\\textemdash /g, '\u2014'], + [/\\rule\{1em\}\{1pt\}/g, '\u2015'], + [/\\Vert /g, '\u2016'], + [/\\Elzreapos /g, '\u201B'], + [/\\textquotedblleft /g, '\u201C'], + [/\\textquotedblright /g, '\u201D'], + [/\\textdagger /g, '\u2020'], + [/\\textdaggerdbl /g, '\u2021'], + [/\\textbullet /g, '\u2022'], + [/\\ldots /g, '\u2026'], + [/\\textperthousand /g, '\u2030'], + [/\\textpertenthousand /g, '\u2031'], + [/\\backprime /g, '\u2035'], + [/\\guilsinglleft /g, '\u2039'], + [/\\guilsinglright /g, '\u203A'], + [/\\mkern4mu /g, '\u205F'], + [/\\nolinebreak /g, '\u2060'], + [/\\ensuremath\{\\Elzpes\}/g, '\u20A7'], + [/\\mbox\{\\texteuro\} /g, '\u20AC'], + [/\\dddot /g, '\u20DB'], + [/\\ddddot /g, '\u20DC'], + [/\\mathbb\{C\}/g, '\u2102'], + [/\\mathscr\{g\}/g, '\u210A'], + [/\\mathscr\{H\}/g, '\u210B'], + [/\\mathfrak\{H\}/g, '\u210C'], + [/\\mathbb\{H\}/g, '\u210D'], + [/\\hslash /g, '\u210F'], + [/\\mathscr\{I\}/g, '\u2110'], + [/\\mathfrak\{I\}/g, '\u2111'], + [/\\mathscr\{L\}/g, '\u2112'], + [/\\mathscr\{l\}/g, '\u2113'], + [/\\mathbb\{N\}/g, '\u2115'], + [/\\cyrchar\\textnumero /g, '\u2116'], + [/\\wp /g, '\u2118'], + [/\\mathbb\{P\}/g, '\u2119'], + [/\\mathbb\{Q\}/g, '\u211A'], + [/\\mathscr\{R\}/g, '\u211B'], + [/\\mathfrak\{R\}/g, '\u211C'], + [/\\mathbb\{R\}/g, '\u211D'], + [/\\Elzxrat /g, '\u211E'], + [/\\texttrademark /g, '\u2122'], + [/\\mathbb\{Z\}/g, '\u2124'], + [/\\Omega /g, '\u2126'], + [/\\mho /g, '\u2127'], + [/\\mathfrak\{Z\}/g, '\u2128'], + [/\\ElsevierGlyph\{2129\}/g, '\u2129'], + [/\\AA /g, '\u212B'], + [/\\mathscr\{B\}/g, '\u212C'], + [/\\mathfrak\{C\}/g, '\u212D'], + [/\\mathscr\{e\}/g, '\u212F'], + [/\\mathscr\{E\}/g, '\u2130'], + [/\\mathscr\{F\}/g, '\u2131'], + [/\\mathscr\{M\}/g, '\u2133'], + [/\\mathscr\{o\}/g, '\u2134'], + [/\\aleph /g, '\u2135'], + [/\\beth /g, '\u2136'], + [/\\gimel /g, '\u2137'], + [/\\daleth /g, '\u2138'], + [/\\textfrac\{1\}\{3\}/g, '\u2153'], + [/\\textfrac\{2\}\{3\}/g, '\u2154'], + [/\\textfrac\{1\}\{5\}/g, '\u2155'], + [/\\textfrac\{2\}\{5\}/g, '\u2156'], + [/\\textfrac\{3\}\{5\}/g, '\u2157'], + [/\\textfrac\{4\}\{5\}/g, '\u2158'], + [/\\textfrac\{1\}\{6\}/g, '\u2159'], + [/\\textfrac\{5\}\{6\}/g, '\u215A'], + [/\\textfrac\{1\}\{8\}/g, '\u215B'], + [/\\textfrac\{3\}\{8\}/g, '\u215C'], + [/\\textfrac\{5\}\{8\}/g, '\u215D'], + [/\\textfrac\{7\}\{8\}/g, '\u215E'], + [/\\leftarrow /g, '\u2190'], + [/\\uparrow /g, '\u2191'], + 
[/\\rightarrow /g, '\u2192'], + [/\\downarrow /g, '\u2193'], + [/\\leftrightarrow /g, '\u2194'], + [/\\updownarrow /g, '\u2195'], + [/\\nwarrow /g, '\u2196'], + [/\\nearrow /g, '\u2197'], + [/\\searrow /g, '\u2198'], + [/\\swarrow /g, '\u2199'], + [/\\nleftarrow /g, '\u219A'], + [/\\nrightarrow /g, '\u219B'], + [/\\arrowwaveright /g, '\u219C'], + [/\\arrowwaveright /g, '\u219D'], + [/\\twoheadleftarrow /g, '\u219E'], + [/\\twoheadrightarrow /g, '\u21A0'], + [/\\leftarrowtail /g, '\u21A2'], + [/\\rightarrowtail /g, '\u21A3'], + [/\\mapsto /g, '\u21A6'], + [/\\hookleftarrow /g, '\u21A9'], + [/\\hookrightarrow /g, '\u21AA'], + [/\\looparrowleft /g, '\u21AB'], + [/\\looparrowright /g, '\u21AC'], + [/\\leftrightsquigarrow /g, '\u21AD'], + [/\\nleftrightarrow /g, '\u21AE'], + [/\\Lsh /g, '\u21B0'], + [/\\Rsh /g, '\u21B1'], + [/\\ElsevierGlyph\{21B3\}/g, '\u21B3'], + [/\\curvearrowleft /g, '\u21B6'], + [/\\curvearrowright /g, '\u21B7'], + [/\\circlearrowleft /g, '\u21BA'], + [/\\circlearrowright /g, '\u21BB'], + [/\\leftharpoonup /g, '\u21BC'], + [/\\leftharpoondown /g, '\u21BD'], + [/\\upharpoonright /g, '\u21BE'], + [/\\upharpoonleft /g, '\u21BF'], + [/\\rightharpoonup /g, '\u21C0'], + [/\\rightharpoondown /g, '\u21C1'], + [/\\downharpoonright /g, '\u21C2'], + [/\\downharpoonleft /g, '\u21C3'], + [/\\rightleftarrows /g, '\u21C4'], + [/\\dblarrowupdown /g, '\u21C5'], + [/\\leftrightarrows /g, '\u21C6'], + [/\\leftleftarrows /g, '\u21C7'], + [/\\upuparrows /g, '\u21C8'], + [/\\rightrightarrows /g, '\u21C9'], + [/\\downdownarrows /g, '\u21CA'], + [/\\leftrightharpoons /g, '\u21CB'], + [/\\rightleftharpoons /g, '\u21CC'], + [/\\nLeftarrow /g, '\u21CD'], + [/\\nLeftrightarrow /g, '\u21CE'], + [/\\nRightarrow /g, '\u21CF'], + [/\\Leftarrow /g, '\u21D0'], + [/\\Uparrow /g, '\u21D1'], + [/\\Rightarrow /g, '\u21D2'], + [/\\Downarrow /g, '\u21D3'], + [/\\Leftrightarrow /g, '\u21D4'], + [/\\Updownarrow /g, '\u21D5'], + [/\\Lleftarrow /g, '\u21DA'], + [/\\Rrightarrow /g, '\u21DB'], + [/\\rightsquigarrow /g, '\u21DD'], + [/\\DownArrowUpArrow /g, '\u21F5'], + [/\\forall /g, '\u2200'], + [/\\complement /g, '\u2201'], + [/\\partial /g, '\u2202'], + [/\\exists /g, '\u2203'], + [/\\nexists /g, '\u2204'], + [/\\varnothing /g, '\u2205'], + [/\\nabla /g, '\u2207'], + [/\\in /g, '\u2208'], + [/\\not\\in /g, '\u2209'], + [/\\ni /g, '\u220B'], + [/\\not\\ni /g, '\u220C'], + [/\\prod /g, '\u220F'], + [/\\coprod /g, '\u2210'], + [/\\sum /g, '\u2211'], + [/\\mp /g, '\u2213'], + [/\\dotplus /g, '\u2214'], + [/\\setminus /g, '\u2216'], + [/\\circ /g, '\u2218'], + [/\\bullet /g, '\u2219'], + [/\\surd /g, '\u221A'], + [/\\propto /g, '\u221D'], + [/\\infty /g, '\u221E'], + [/\\rightangle /g, '\u221F'], + [/\\angle /g, '\u2220'], + [/\\measuredangle /g, '\u2221'], + [/\\sphericalangle /g, '\u2222'], + [/\\mid /g, '\u2223'], + [/\\nmid /g, '\u2224'], + [/\\parallel /g, '\u2225'], + [/\\nparallel /g, '\u2226'], + [/\\wedge /g, '\u2227'], + [/\\vee /g, '\u2228'], + [/\\cap /g, '\u2229'], + [/\\cup /g, '\u222A'], + [/\\int /g, '\u222B'], + [/\\int\\!\\int /g, '\u222C'], + [/\\int\\!\\int\\!\\int /g, '\u222D'], + [/\\oint /g, '\u222E'], + [/\\surfintegral /g, '\u222F'], + [/\\volintegral /g, '\u2230'], + [/\\clwintegral /g, '\u2231'], + [/\\ElsevierGlyph\{2232\}/g, '\u2232'], + [/\\ElsevierGlyph\{2233\}/g, '\u2233'], + [/\\therefore /g, '\u2234'], + [/\\because /g, '\u2235'], + [/\\Colon /g, '\u2237'], + [/\\ElsevierGlyph\{2238\}/g, '\u2238'], + [/\\mathbin\{\{:\}\\!\\!\{\-\}\\!\\!\{:\}\}/g, '\u223A'], + [/\\homothetic /g, 
'\u223B'], + [/\\sim /g, '\u223C'], + [/\\backsim /g, '\u223D'], + [/\\lazysinv /g, '\u223E'], + [/\\wr /g, '\u2240'], + [/\\not\\sim /g, '\u2241'], + [/\\ElsevierGlyph\{2242\}/g, '\u2242'], + [/\\NotEqualTilde /g, '\u2242-00338'], + [/\\simeq /g, '\u2243'], + [/\\not\\simeq /g, '\u2244'], + [/\\cong /g, '\u2245'], + [/\\approxnotequal /g, '\u2246'], + [/\\not\\cong /g, '\u2247'], + [/\\approx /g, '\u2248'], + [/\\not\\approx /g, '\u2249'], + [/\\approxeq /g, '\u224A'], + [/\\tildetrpl /g, '\u224B'], + [/\\not\\apid /g, '\u224B-00338'], + [/\\allequal /g, '\u224C'], + [/\\asymp /g, '\u224D'], + [/\\Bumpeq /g, '\u224E'], + [/\\NotHumpDownHump /g, '\u224E-00338'], + [/\\bumpeq /g, '\u224F'], + [/\\NotHumpEqual /g, '\u224F-00338'], + [/\\doteq /g, '\u2250'], + [/\\not\\doteq/g, '\u2250-00338'], + [/\\doteqdot /g, '\u2251'], + [/\\fallingdotseq /g, '\u2252'], + [/\\risingdotseq /g, '\u2253'], + [/\\eqcirc /g, '\u2256'], + [/\\circeq /g, '\u2257'], + [/\\estimates /g, '\u2259'], + [/\\ElsevierGlyph\{225A\}/g, '\u225A'], + [/\\starequal /g, '\u225B'], + [/\\triangleq /g, '\u225C'], + [/\\ElsevierGlyph\{225F\}/g, '\u225F'], + [/\\not =/g, '\u2260'], + [/\\equiv /g, '\u2261'], + [/\\not\\equiv /g, '\u2262'], + [/\\leq /g, '\u2264'], + [/\\geq /g, '\u2265'], + [/\\leqq /g, '\u2266'], + [/\\geqq /g, '\u2267'], + [/\\lneqq /g, '\u2268'], + [/\\lvertneqq /g, '\u2268-0FE00'], + [/\\gneqq /g, '\u2269'], + [/\\gvertneqq /g, '\u2269-0FE00'], + [/\\ll /g, '\u226A'], + [/\\NotLessLess /g, '\u226A-00338'], + [/\\gg /g, '\u226B'], + [/\\NotGreaterGreater /g, '\u226B-00338'], + [/\\between /g, '\u226C'], + [/\\not\\kern\-0.3em\\times /g, '\u226D'], + [/\\not/g, '\u226F'], + [/\\not\\leq /g, '\u2270'], + [/\\not\\geq /g, '\u2271'], + [/\\lessequivlnt /g, '\u2272'], + [/\\greaterequivlnt /g, '\u2273'], + [/\\ElsevierGlyph\{2274\}/g, '\u2274'], + [/\\ElsevierGlyph\{2275\}/g, '\u2275'], + [/\\lessgtr /g, '\u2276'], + [/\\gtrless /g, '\u2277'], + [/\\notlessgreater /g, '\u2278'], + [/\\notgreaterless /g, '\u2279'], + [/\\prec /g, '\u227A'], + [/\\succ /g, '\u227B'], + [/\\preccurlyeq /g, '\u227C'], + [/\\succcurlyeq /g, '\u227D'], + [/\\precapprox /g, '\u227E'], + [/\\NotPrecedesTilde /g, '\u227E-00338'], + [/\\succapprox /g, '\u227F'], + [/\\NotSucceedsTilde /g, '\u227F-00338'], + [/\\not\\prec /g, '\u2280'], + [/\\not\\succ /g, '\u2281'], + [/\\subset /g, '\u2282'], + [/\\supset /g, '\u2283'], + [/\\not\\subset /g, '\u2284'], + [/\\not\\supset /g, '\u2285'], + [/\\subseteq /g, '\u2286'], + [/\\supseteq /g, '\u2287'], + [/\\not\\subseteq /g, '\u2288'], + [/\\not\\supseteq /g, '\u2289'], + [/\\subsetneq /g, '\u228A'], + [/\\varsubsetneqq /g, '\u228A-0FE00'], + [/\\supsetneq /g, '\u228B'], + [/\\varsupsetneq /g, '\u228B-0FE00'], + [/\\uplus /g, '\u228E'], + [/\\sqsubset /g, '\u228F'], + [/\\NotSquareSubset /g, '\u228F-00338'], + [/\\sqsupset /g, '\u2290'], + [/\\NotSquareSuperset /g, '\u2290-00338'], + [/\\sqsubseteq /g, '\u2291'], + [/\\sqsupseteq /g, '\u2292'], + [/\\sqcap /g, '\u2293'], + [/\\sqcup /g, '\u2294'], + [/\\oplus /g, '\u2295'], + [/\\ominus /g, '\u2296'], + [/\\otimes /g, '\u2297'], + [/\\oslash /g, '\u2298'], + [/\\odot /g, '\u2299'], + [/\\circledcirc /g, '\u229A'], + [/\\circledast /g, '\u229B'], + [/\\circleddash /g, '\u229D'], + [/\\boxplus /g, '\u229E'], + [/\\boxminus /g, '\u229F'], + [/\\boxtimes /g, '\u22A0'], + [/\\boxdot /g, '\u22A1'], + [/\\vdash /g, '\u22A2'], + [/\\dashv /g, '\u22A3'], + [/\\top /g, '\u22A4'], + [/\\perp /g, '\u22A5'], + [/\\truestate /g, '\u22A7'], + [/\\forcesextra /g, 
'\u22A8'], + [/\\Vdash /g, '\u22A9'], + [/\\Vvdash /g, '\u22AA'], + [/\\VDash /g, '\u22AB'], + [/\\nvdash /g, '\u22AC'], + [/\\nvDash /g, '\u22AD'], + [/\\nVdash /g, '\u22AE'], + [/\\nVDash /g, '\u22AF'], + [/\\vartriangleleft /g, '\u22B2'], + [/\\vartriangleright /g, '\u22B3'], + [/\\trianglelefteq /g, '\u22B4'], + [/\\trianglerighteq /g, '\u22B5'], + [/\\original /g, '\u22B6'], + [/\\image /g, '\u22B7'], + [/\\multimap /g, '\u22B8'], + [/\\hermitconjmatrix /g, '\u22B9'], + [/\\intercal /g, '\u22BA'], + [/\\veebar /g, '\u22BB'], + [/\\rightanglearc /g, '\u22BE'], + [/\\ElsevierGlyph\{22C0\}/g, '\u22C0'], + [/\\ElsevierGlyph\{22C1\}/g, '\u22C1'], + [/\\bigcap /g, '\u22C2'], + [/\\bigcup /g, '\u22C3'], + [/\\diamond /g, '\u22C4'], + [/\\cdot /g, '\u22C5'], + [/\\star /g, '\u22C6'], + [/\\divideontimes /g, '\u22C7'], + [/\\bowtie /g, '\u22C8'], + [/\\ltimes /g, '\u22C9'], + [/\\rtimes /g, '\u22CA'], + [/\\leftthreetimes /g, '\u22CB'], + [/\\rightthreetimes /g, '\u22CC'], + [/\\backsimeq /g, '\u22CD'], + [/\\curlyvee /g, '\u22CE'], + [/\\curlywedge /g, '\u22CF'], + [/\\Subset /g, '\u22D0'], + [/\\Supset /g, '\u22D1'], + [/\\Cap /g, '\u22D2'], + [/\\Cup /g, '\u22D3'], + [/\\pitchfork /g, '\u22D4'], + [/\\lessdot /g, '\u22D6'], + [/\\gtrdot /g, '\u22D7'], + [/\\verymuchless /g, '\u22D8'], + [/\\verymuchgreater /g, '\u22D9'], + [/\\lesseqgtr /g, '\u22DA'], + [/\\gtreqless /g, '\u22DB'], + [/\\curlyeqprec /g, '\u22DE'], + [/\\curlyeqsucc /g, '\u22DF'], + [/\\not\\sqsubseteq /g, '\u22E2'], + [/\\not\\sqsupseteq /g, '\u22E3'], + [/\\Elzsqspne /g, '\u22E5'], + [/\\lnsim /g, '\u22E6'], + [/\\gnsim /g, '\u22E7'], + [/\\precedesnotsimilar /g, '\u22E8'], + [/\\succnsim /g, '\u22E9'], + [/\\ntriangleleft /g, '\u22EA'], + [/\\ntriangleright /g, '\u22EB'], + [/\\ntrianglelefteq /g, '\u22EC'], + [/\\ntrianglerighteq /g, '\u22ED'], + [/\\vdots /g, '\u22EE'], + [/\\cdots /g, '\u22EF'], + [/\\upslopeellipsis /g, '\u22F0'], + [/\\downslopeellipsis /g, '\u22F1'], + [/\\barwedge /g, '\u2305'], + [/\\perspcorrespond /g, '\u2306'], + [/\\lceil /g, '\u2308'], + [/\\rceil /g, '\u2309'], + [/\\lfloor /g, '\u230A'], + [/\\rfloor /g, '\u230B'], + [/\\recorder /g, '\u2315'], + [/\\mathchar"2208/g, '\u2316'], + [/\\ulcorner /g, '\u231C'], + [/\\urcorner /g, '\u231D'], + [/\\llcorner /g, '\u231E'], + [/\\lrcorner /g, '\u231F'], + [/\\frown /g, '\u2322'], + [/\\smile /g, '\u2323'], + [/\\langle /g, '\u2329'], + [/\\rangle /g, '\u232A'], + [/\\ElsevierGlyph\{E838\}/g, '\u233D'], + [/\\Elzdlcorn /g, '\u23A3'], + [/\\lmoustache /g, '\u23B0'], + [/\\rmoustache /g, '\u23B1'], + [/\\textvisiblespace /g, '\u2423'], + [/\\ding\{172\}/g, '\u2460'], + [/\\ding\{173\}/g, '\u2461'], + [/\\ding\{174\}/g, '\u2462'], + [/\\ding\{175\}/g, '\u2463'], + [/\\ding\{176\}/g, '\u2464'], + [/\\ding\{177\}/g, '\u2465'], + [/\\ding\{178\}/g, '\u2466'], + [/\\ding\{179\}/g, '\u2467'], + [/\\ding\{180\}/g, '\u2468'], + [/\\ding\{181\}/g, '\u2469'], + [/\\circledS /g, '\u24C8'], + [/\\Elzdshfnc /g, '\u2506'], + [/\\Elzsqfnw /g, '\u2519'], + [/\\diagup /g, '\u2571'], + [/\\ding\{110\}/g, '\u25A0'], + [/\\square /g, '\u25A1'], + [/\\blacksquare /g, '\u25AA'], + [/\\fbox\{~~\}/g, '\u25AD'], + [/\\Elzvrecto /g, '\u25AF'], + [/\\ElsevierGlyph\{E381\}/g, '\u25B1'], + [/\\ding\{115\}/g, '\u25B2'], + [/\\bigtriangleup /g, '\u25B3'], + [/\\blacktriangle /g, '\u25B4'], + [/\\vartriangle /g, '\u25B5'], + [/\\blacktriangleright /g, '\u25B8'], + [/\\triangleright /g, '\u25B9'], + [/\\ding\{116\}/g, '\u25BC'], + [/\\bigtriangledown /g, '\u25BD'], + 
[/\\blacktriangledown /g, '\u25BE'], + [/\\triangledown /g, '\u25BF'], + [/\\blacktriangleleft /g, '\u25C2'], + [/\\triangleleft /g, '\u25C3'], + [/\\ding\{117\}/g, '\u25C6'], + [/\\lozenge /g, '\u25CA'], + [/\\bigcirc /g, '\u25CB'], + [/\\ding\{108\}/g, '\u25CF'], + [/\\Elzcirfl /g, '\u25D0'], + [/\\Elzcirfr /g, '\u25D1'], + [/\\Elzcirfb /g, '\u25D2'], + [/\\ding\{119\}/g, '\u25D7'], + [/\\Elzrvbull /g, '\u25D8'], + [/\\Elzsqfl /g, '\u25E7'], + [/\\Elzsqfr /g, '\u25E8'], + [/\\Elzsqfse /g, '\u25EA'], + [/\\bigcirc /g, '\u25EF'], + [/\\ding\{72\}/g, '\u2605'], + [/\\ding\{73\}/g, '\u2606'], + [/\\ding\{37\}/g, '\u260E'], + [/\\ding\{42\}/g, '\u261B'], + [/\\ding\{43\}/g, '\u261E'], + [/\\rightmoon /g, '\u263E'], + [/\\mercury /g, '\u263F'], + [/\\venus /g, '\u2640'], + [/\\male /g, '\u2642'], + [/\\jupiter /g, '\u2643'], + [/\\saturn /g, '\u2644'], + [/\\uranus /g, '\u2645'], + [/\\neptune /g, '\u2646'], + [/\\pluto /g, '\u2647'], + [/\\aries /g, '\u2648'], + [/\\taurus /g, '\u2649'], + [/\\gemini /g, '\u264A'], + [/\\cancer /g, '\u264B'], + [/\\leo /g, '\u264C'], + [/\\virgo /g, '\u264D'], + [/\\libra /g, '\u264E'], + [/\\scorpio /g, '\u264F'], + [/\\sagittarius /g, '\u2650'], + [/\\capricornus /g, '\u2651'], + [/\\aquarius /g, '\u2652'], + [/\\pisces /g, '\u2653'], + [/\\ding\{171\}/g, '\u2660'], + [/\\diamond /g, '\u2662'], + [/\\ding\{168\}/g, '\u2663'], + [/\\ding\{170\}/g, '\u2665'], + [/\\ding\{169\}/g, '\u2666'], + [/\\quarternote /g, '\u2669'], + [/\\eighthnote /g, '\u266A'], + [/\\flat /g, '\u266D'], + [/\\natural /g, '\u266E'], + [/\\sharp /g, '\u266F'], + [/\\ding\{33\}/g, '\u2701'], + [/\\ding\{34\}/g, '\u2702'], + [/\\ding\{35\}/g, '\u2703'], + [/\\ding\{36\}/g, '\u2704'], + [/\\ding\{38\}/g, '\u2706'], + [/\\ding\{39\}/g, '\u2707'], + [/\\ding\{40\}/g, '\u2708'], + [/\\ding\{41\}/g, '\u2709'], + [/\\ding\{44\}/g, '\u270C'], + [/\\ding\{45\}/g, '\u270D'], + [/\\ding\{46\}/g, '\u270E'], + [/\\ding\{47\}/g, '\u270F'], + [/\\ding\{48\}/g, '\u2710'], + [/\\ding\{49\}/g, '\u2711'], + [/\\ding\{50\}/g, '\u2712'], + [/\\ding\{51\}/g, '\u2713'], + [/\\ding\{52\}/g, '\u2714'], + [/\\ding\{53\}/g, '\u2715'], + [/\\ding\{54\}/g, '\u2716'], + [/\\ding\{55\}/g, '\u2717'], + [/\\ding\{56\}/g, '\u2718'], + [/\\ding\{57\}/g, '\u2719'], + [/\\ding\{58\}/g, '\u271A'], + [/\\ding\{59\}/g, '\u271B'], + [/\\ding\{60\}/g, '\u271C'], + [/\\ding\{61\}/g, '\u271D'], + [/\\ding\{62\}/g, '\u271E'], + [/\\ding\{63\}/g, '\u271F'], + [/\\ding\{64\}/g, '\u2720'], + [/\\ding\{65\}/g, '\u2721'], + [/\\ding\{66\}/g, '\u2722'], + [/\\ding\{67\}/g, '\u2723'], + [/\\ding\{68\}/g, '\u2724'], + [/\\ding\{69\}/g, '\u2725'], + [/\\ding\{70\}/g, '\u2726'], + [/\\ding\{71\}/g, '\u2727'], + [/\\ding\{73\}/g, '\u2729'], + [/\\ding\{74\}/g, '\u272A'], + [/\\ding\{75\}/g, '\u272B'], + [/\\ding\{76\}/g, '\u272C'], + [/\\ding\{77\}/g, '\u272D'], + [/\\ding\{78\}/g, '\u272E'], + [/\\ding\{79\}/g, '\u272F'], + [/\\ding\{80\}/g, '\u2730'], + [/\\ding\{81\}/g, '\u2731'], + [/\\ding\{82\}/g, '\u2732'], + [/\\ding\{83\}/g, '\u2733'], + [/\\ding\{84\}/g, '\u2734'], + [/\\ding\{85\}/g, '\u2735'], + [/\\ding\{86\}/g, '\u2736'], + [/\\ding\{87\}/g, '\u2737'], + [/\\ding\{88\}/g, '\u2738'], + [/\\ding\{89\}/g, '\u2739'], + [/\\ding\{90\}/g, '\u273A'], + [/\\ding\{91\}/g, '\u273B'], + [/\\ding\{92\}/g, '\u273C'], + [/\\ding\{93\}/g, '\u273D'], + [/\\ding\{94\}/g, '\u273E'], + [/\\ding\{95\}/g, '\u273F'], + [/\\ding\{96\}/g, '\u2740'], + [/\\ding\{97\}/g, '\u2741'], + [/\\ding\{98\}/g, '\u2742'], + [/\\ding\{99\}/g, '\u2743'], + 
[/\\ding\{100\}/g, '\u2744'], + [/\\ding\{101\}/g, '\u2745'], + [/\\ding\{102\}/g, '\u2746'], + [/\\ding\{103\}/g, '\u2747'], + [/\\ding\{104\}/g, '\u2748'], + [/\\ding\{105\}/g, '\u2749'], + [/\\ding\{106\}/g, '\u274A'], + [/\\ding\{107\}/g, '\u274B'], + [/\\ding\{109\}/g, '\u274D'], + [/\\ding\{111\}/g, '\u274F'], + [/\\ding\{112\}/g, '\u2750'], + [/\\ding\{113\}/g, '\u2751'], + [/\\ding\{114\}/g, '\u2752'], + [/\\ding\{118\}/g, '\u2756'], + [/\\ding\{120\}/g, '\u2758'], + [/\\ding\{121\}/g, '\u2759'], + [/\\ding\{122\}/g, '\u275A'], + [/\\ding\{123\}/g, '\u275B'], + [/\\ding\{124\}/g, '\u275C'], + [/\\ding\{125\}/g, '\u275D'], + [/\\ding\{126\}/g, '\u275E'], + [/\\ding\{161\}/g, '\u2761'], + [/\\ding\{162\}/g, '\u2762'], + [/\\ding\{163\}/g, '\u2763'], + [/\\ding\{164\}/g, '\u2764'], + [/\\ding\{165\}/g, '\u2765'], + [/\\ding\{166\}/g, '\u2766'], + [/\\ding\{167\}/g, '\u2767'], + [/\\ding\{182\}/g, '\u2776'], + [/\\ding\{183\}/g, '\u2777'], + [/\\ding\{184\}/g, '\u2778'], + [/\\ding\{185\}/g, '\u2779'], + [/\\ding\{186\}/g, '\u277A'], + [/\\ding\{187\}/g, '\u277B'], + [/\\ding\{188\}/g, '\u277C'], + [/\\ding\{189\}/g, '\u277D'], + [/\\ding\{190\}/g, '\u277E'], + [/\\ding\{191\}/g, '\u277F'], + [/\\ding\{192\}/g, '\u2780'], + [/\\ding\{193\}/g, '\u2781'], + [/\\ding\{194\}/g, '\u2782'], + [/\\ding\{195\}/g, '\u2783'], + [/\\ding\{196\}/g, '\u2784'], + [/\\ding\{197\}/g, '\u2785'], + [/\\ding\{198\}/g, '\u2786'], + [/\\ding\{199\}/g, '\u2787'], + [/\\ding\{200\}/g, '\u2788'], + [/\\ding\{201\}/g, '\u2789'], + [/\\ding\{202\}/g, '\u278A'], + [/\\ding\{203\}/g, '\u278B'], + [/\\ding\{204\}/g, '\u278C'], + [/\\ding\{205\}/g, '\u278D'], + [/\\ding\{206\}/g, '\u278E'], + [/\\ding\{207\}/g, '\u278F'], + [/\\ding\{208\}/g, '\u2790'], + [/\\ding\{209\}/g, '\u2791'], + [/\\ding\{210\}/g, '\u2792'], + [/\\ding\{211\}/g, '\u2793'], + [/\\ding\{212\}/g, '\u2794'], + [/\\ding\{216\}/g, '\u2798'], + [/\\ding\{217\}/g, '\u2799'], + [/\\ding\{218\}/g, '\u279A'], + [/\\ding\{219\}/g, '\u279B'], + [/\\ding\{220\}/g, '\u279C'], + [/\\ding\{221\}/g, '\u279D'], + [/\\ding\{222\}/g, '\u279E'], + [/\\ding\{223\}/g, '\u279F'], + [/\\ding\{224\}/g, '\u27A0'], + [/\\ding\{225\}/g, '\u27A1'], + [/\\ding\{226\}/g, '\u27A2'], + [/\\ding\{227\}/g, '\u27A3'], + [/\\ding\{228\}/g, '\u27A4'], + [/\\ding\{229\}/g, '\u27A5'], + [/\\ding\{230\}/g, '\u27A6'], + [/\\ding\{231\}/g, '\u27A7'], + [/\\ding\{232\}/g, '\u27A8'], + [/\\ding\{233\}/g, '\u27A9'], + [/\\ding\{234\}/g, '\u27AA'], + [/\\ding\{235\}/g, '\u27AB'], + [/\\ding\{236\}/g, '\u27AC'], + [/\\ding\{237\}/g, '\u27AD'], + [/\\ding\{238\}/g, '\u27AE'], + [/\\ding\{239\}/g, '\u27AF'], + [/\\ding\{241\}/g, '\u27B1'], + [/\\ding\{242\}/g, '\u27B2'], + [/\\ding\{243\}/g, '\u27B3'], + [/\\ding\{244\}/g, '\u27B4'], + [/\\ding\{245\}/g, '\u27B5'], + [/\\ding\{246\}/g, '\u27B6'], + [/\\ding\{247\}/g, '\u27B7'], + [/\\ding\{248\}/g, '\u27B8'], + [/\\ding\{249\}/g, '\u27B9'], + [/\\ding\{250\}/g, '\u27BA'], + [/\\ding\{251\}/g, '\u27BB'], + [/\\ding\{252\}/g, '\u27BC'], + [/\\ding\{253\}/g, '\u27BD'], + [/\\ding\{254\}/g, '\u27BE'], + [/\\longleftarrow /g, '\u27F5'], + [/\\longrightarrow /g, '\u27F6'], + [/\\longleftrightarrow /g, '\u27F7'], + [/\\Longleftarrow /g, '\u27F8'], + [/\\Longrightarrow /g, '\u27F9'], + [/\\Longleftrightarrow /g, '\u27FA'], + [/\\longmapsto /g, '\u27FC'], + [/\\sim\\joinrel\\leadsto/g, '\u27FF'], + [/\\ElsevierGlyph\{E212\}/g, '\u2905'], + [/\\UpArrowBar /g, '\u2912'], + [/\\DownArrowBar /g, '\u2913'], + [/\\ElsevierGlyph\{E20C\}/g, '\u2923'], + 
[/\\ElsevierGlyph\{E20D\}/g, '\u2924'], + [/\\ElsevierGlyph\{E20B\}/g, '\u2925'], + [/\\ElsevierGlyph\{E20A\}/g, '\u2926'], + [/\\ElsevierGlyph\{E211\}/g, '\u2927'], + [/\\ElsevierGlyph\{E20E\}/g, '\u2928'], + [/\\ElsevierGlyph\{E20F\}/g, '\u2929'], + [/\\ElsevierGlyph\{E210\}/g, '\u292A'], + [/\\ElsevierGlyph\{E21C\}/g, '\u2933'], + [/\\ElsevierGlyph\{E21D\}/g, '\u2933-00338'], + [/\\ElsevierGlyph\{E21A\}/g, '\u2936'], + [/\\ElsevierGlyph\{E219\}/g, '\u2937'], + [/\\Elolarr /g, '\u2940'], + [/\\Elorarr /g, '\u2941'], + [/\\ElzRlarr /g, '\u2942'], + [/\\ElzrLarr /g, '\u2944'], + [/\\Elzrarrx /g, '\u2947'], + [/\\LeftRightVector /g, '\u294E'], + [/\\RightUpDownVector /g, '\u294F'], + [/\\DownLeftRightVector /g, '\u2950'], + [/\\LeftUpDownVector /g, '\u2951'], + [/\\LeftVectorBar /g, '\u2952'], + [/\\RightVectorBar /g, '\u2953'], + [/\\RightUpVectorBar /g, '\u2954'], + [/\\RightDownVectorBar /g, '\u2955'], + [/\\DownLeftVectorBar /g, '\u2956'], + [/\\DownRightVectorBar /g, '\u2957'], + [/\\LeftUpVectorBar /g, '\u2958'], + [/\\LeftDownVectorBar /g, '\u2959'], + [/\\LeftTeeVector /g, '\u295A'], + [/\\RightTeeVector /g, '\u295B'], + [/\\RightUpTeeVector /g, '\u295C'], + [/\\RightDownTeeVector /g, '\u295D'], + [/\\DownLeftTeeVector /g, '\u295E'], + [/\\DownRightTeeVector /g, '\u295F'], + [/\\LeftUpTeeVector /g, '\u2960'], + [/\\LeftDownTeeVector /g, '\u2961'], + [/\\UpEquilibrium /g, '\u296E'], + [/\\ReverseUpEquilibrium /g, '\u296F'], + [/\\RoundImplies /g, '\u2970'], + [/\\ElsevierGlyph\{E214\}/g, '\u297C'], + [/\\ElsevierGlyph\{E215\}/g, '\u297D'], + [/\\Elztfnc /g, '\u2980'], + [/\\ElsevierGlyph\{3018\}/g, '\u2985'], + [/\\Elroang /g, '\u2986'], + [/\\ElsevierGlyph\{E291\}/g, '\u2994'], + [/\\Elzddfnc /g, '\u2999'], + [/\\Angle /g, '\u299C'], + [/\\Elzlpargt /g, '\u29A0'], + [/\\ElsevierGlyph\{E260\}/g, '\u29B5'], + [/\\ElsevierGlyph\{E61B\}/g, '\u29B6'], + [/\\ElzLap /g, '\u29CA'], + [/\\Elzdefas /g, '\u29CB'], + [/\\LeftTriangleBar /g, '\u29CF'], + [/\\NotLeftTriangleBar /g, '\u29CF-00338'], + [/\\RightTriangleBar /g, '\u29D0'], + [/\\NotRightTriangleBar /g, '\u29D0-00338'], + [/\\ElsevierGlyph\{E372\}/g, '\u29DC'], + [/\\blacklozenge /g, '\u29EB'], + [/\\RuleDelayed /g, '\u29F4'], + [/\\Elxuplus /g, '\u2A04'], + [/\\ElzThr /g, '\u2A05'], + [/\\Elxsqcup /g, '\u2A06'], + [/\\ElzInf /g, '\u2A07'], + [/\\ElzSup /g, '\u2A08'], + [/\\ElzCint /g, '\u2A0D'], + [/\\clockoint /g, '\u2A0F'], + [/\\ElsevierGlyph\{E395\}/g, '\u2A10'], + [/\\sqrint /g, '\u2A16'], + [/\\ElsevierGlyph\{E25A\}/g, '\u2A25'], + [/\\ElsevierGlyph\{E25B\}/g, '\u2A2A'], + [/\\ElsevierGlyph\{E25C\}/g, '\u2A2D'], + [/\\ElsevierGlyph\{E25D\}/g, '\u2A2E'], + [/\\ElzTimes /g, '\u2A2F'], + [/\\ElsevierGlyph\{E25E\}/g, '\u2A34'], + [/\\ElsevierGlyph\{E25E\}/g, '\u2A35'], + [/\\ElsevierGlyph\{E259\}/g, '\u2A3C'], + [/\\amalg /g, '\u2A3F'], + [/\\ElzAnd /g, '\u2A53'], + [/\\ElzOr /g, '\u2A54'], + [/\\ElsevierGlyph\{E36E\}/g, '\u2A55'], + [/\\ElOr /g, '\u2A56'], + [/\\perspcorrespond /g, '\u2A5E'], + [/\\Elzminhat /g, '\u2A5F'], + [/\\ElsevierGlyph\{225A\}/g, '\u2A63'], + [/\\stackrel\{*\}\{=\}/g, '\u2A6E'], + [/\\Equal /g, '\u2A75'], + [/\\leqslant /g, '\u2A7D'], + [/\\nleqslant /g, '\u2A7D-00338'], + [/\\geqslant /g, '\u2A7E'], + [/\\ngeqslant /g, '\u2A7E-00338'], + [/\\lessapprox /g, '\u2A85'], + [/\\gtrapprox /g, '\u2A86'], + [/\\lneq /g, '\u2A87'], + [/\\gneq /g, '\u2A88'], + [/\\lnapprox /g, '\u2A89'], + [/\\gnapprox /g, '\u2A8A'], + [/\\lesseqqgtr /g, '\u2A8B'], + [/\\gtreqqless /g, '\u2A8C'], + [/\\eqslantless /g, '\u2A95'], + 
[/\\eqslantgtr /g, '\u2A96'], + [/\\Pisymbol\{ppi020\}\{117\}/g, '\u2A9D'], + [/\\Pisymbol\{ppi020\}\{105\}/g, '\u2A9E'], + [/\\NestedLessLess /g, '\u2AA1'], + [/\\NotNestedLessLess /g, '\u2AA1-00338'], + [/\\NestedGreaterGreater /g, '\u2AA2'], + [/\\NotNestedGreaterGreater /g, '\u2AA2-00338'], + [/\\preceq /g, '\u2AAF'], + [/\\not\\preceq /g, '\u2AAF-00338'], + [/\\succeq /g, '\u2AB0'], + [/\\not\\succeq /g, '\u2AB0-00338'], + [/\\precneqq /g, '\u2AB5'], + [/\\succneqq /g, '\u2AB6'], + [/\\precapprox /g, '\u2AB7'], + [/\\succapprox /g, '\u2AB8'], + [/\\precnapprox /g, '\u2AB9'], + [/\\succnapprox /g, '\u2ABA'], + [/\\subseteqq /g, '\u2AC5'], + [/\\nsubseteqq /g, '\u2AC5-00338'], + [/\\supseteqq /g, '\u2AC6'], + [/\\nsupseteqq/g, '\u2AC6-00338'], + [/\\subsetneqq /g, '\u2ACB'], + [/\\supsetneqq /g, '\u2ACC'], + [/\\ElsevierGlyph\{E30D\}/g, '\u2AEB'], + [/\\Elztdcol /g, '\u2AF6'], + [/\\ElsevierGlyph\{300A\}/g, '\u300A'], + [/\\ElsevierGlyph\{300B\}/g, '\u300B'], + [/\\ElsevierGlyph\{3018\}/g, '\u3018'], + [/\\ElsevierGlyph\{3019\}/g, '\u3019'], + [/\\openbracketleft /g, '\u301A'], + [/\\openbracketright /g, '\u301B'], +] + +export default BibtexParser +if (typeof module !== 'undefined' && module.exports) { + module.exports = BibtexParser +} diff --git a/services/references/buildscript.txt b/services/references/buildscript.txt new file mode 100644 index 0000000000..05771cd85a --- /dev/null +++ b/services/references/buildscript.txt @@ -0,0 +1,9 @@ +references +--dependencies=mongo +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--env-add= +--env-pass-through= +--esmock-loader=True +--node-version=20.18.2 +--public-repo=False +--script-version=4.5.0 diff --git a/services/references/config/settings.defaults.cjs b/services/references/config/settings.defaults.cjs new file mode 100644 index 0000000000..2551f99f09 --- /dev/null +++ b/services/references/config/settings.defaults.cjs @@ -0,0 +1,9 @@ +module.exports = { + internal: { + references: { + port: 3056, + host: process.env.REFERENCES_HOST || '127.0.0.1', + }, + }, +} + diff --git a/services/references/docker-compose.ci.yml b/services/references/docker-compose.ci.yml new file mode 100644 index 0000000000..51eb64d126 --- /dev/null +++ b/services/references/docker-compose.ci.yml @@ -0,0 +1,52 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + user: node + command: npm run test:unit:_run + environment: + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + + + test_acceptance: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + environment: + ELASTIC_SEARCH_DSN: es:9200 + MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + depends_on: + mongo: + condition: service_started + user: node + command: npm run test:acceptance + + + tar: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + volumes: + - ./:/tmp/build/ + command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . 
+ user: root + mongo: + image: mongo:6.0.13 + command: --replSet overleaf + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 diff --git a/services/references/docker-compose.yml b/services/references/docker-compose.yml new file mode 100644 index 0000000000..ad71431768 --- /dev/null +++ b/services/references/docker-compose.yml @@ -0,0 +1,56 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + image: node:20.18.2 + volumes: + - .:/overleaf/services/references + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/references + environment: + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + command: npm run --silent test:unit + user: node + + test_acceptance: + image: node:20.18.2 + volumes: + - .:/overleaf/services/references + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/references + environment: + ELASTIC_SEARCH_DSN: es:9200 + MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + user: node + depends_on: + mongo: + condition: service_started + command: npm run --silent test:acceptance + + mongo: + image: mongo:6.0.13 + command: --replSet overleaf + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. 
+ - mongo:127.0.0.1 + diff --git a/services/references/package.json b/services/references/package.json new file mode 100644 index 0000000000..9b0988e7ac --- /dev/null +++ b/services/references/package.json @@ -0,0 +1,26 @@ +{ + "name": "@overleaf/references", + "description": "An API for providing citation-keys", + "private": true, + "type": "module", + "main": "app.js", + "scripts": { + "start": "node app.js" + }, + "version": "0.1.0", + "dependencies": { + "@overleaf/settings": "*", + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "async": "^3.2.5", + "express": "^4.21.2" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "esmock": "^2.6.9", + "mocha": "^11.1.0", + "sinon": "^9.2.4", + "typescript": "^5.0.4" + } +} diff --git a/services/references/tsconfig.json b/services/references/tsconfig.json new file mode 100644 index 0000000000..d3fdd3022a --- /dev/null +++ b/services/references/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "app.js", + "app/js/**/*", + "benchmarks/**/*", + "config/**/*", + "scripts/**/*", + "test/**/*", + "types" + ] +} diff --git a/services/web/app.mjs b/services/web/app.mjs index b7c723da3d..3f54cc36a8 100644 --- a/services/web/app.mjs +++ b/services/web/app.mjs @@ -56,14 +56,8 @@ if (Settings.catchErrors) { // Create ./data/dumpFolder if needed FileWriter.ensureDumpFolderExists() -if ( - !Features.hasFeature('project-history-blobs') && - !Features.hasFeature('filestore') -) { - throw new Error( - 'invalid config: must enable either project-history-blobs (Settings.enableProjectHistoryBlobs=true) or enable filestore (Settings.disableFilestore=false)' - ) -} +// Validate combination of feature flags. +Features.validateSettings() // handle SIGTERM for graceful shutdown in kubernetes process.on('SIGTERM', function (signal) { diff --git a/services/web/app/src/Features/Authentication/AuthenticationController.js b/services/web/app/src/Features/Authentication/AuthenticationController.js index 7a97d2ac9c..a190ddab5a 100644 --- a/services/web/app/src/Features/Authentication/AuthenticationController.js +++ b/services/web/app/src/Features/Authentication/AuthenticationController.js @@ -36,7 +36,22 @@ function send401WithChallenge(res) { function checkCredentials(userDetailsMap, user, password) { const expectedPassword = userDetailsMap.get(user) const userExists = userDetailsMap.has(user) && expectedPassword // user exists with a non-null password - const isValid = userExists && tsscmp(expectedPassword, password) + + let isValid = false + if (userExists) { + if (Array.isArray(expectedPassword)) { + const isValidPrimary = Boolean( + expectedPassword[0] && tsscmp(expectedPassword[0], password) + ) + const isValidFallback = Boolean( + expectedPassword[1] && tsscmp(expectedPassword[1], password) + ) + isValid = isValidPrimary || isValidFallback + } else { + isValid = tsscmp(expectedPassword, password) + } + } + if (!isValid) { logger.err({ user }, 'invalid login details') } @@ -82,6 +97,7 @@ const AuthenticationController = { analyticsId: user.analyticsId || user._id, alphaProgram: user.alphaProgram || undefined, // only store if set betaProgram: user.betaProgram || undefined, // only store if set + externalAuth: user.externalAuth || false, } if (user.isAdmin) { lightUser.isAdmin = true diff --git a/services/web/app/src/Features/Compile/ClsiManager.js b/services/web/app/src/Features/Compile/ClsiManager.js index 6f11297248..94208e8607 100644 --- 
a/services/web/app/src/Features/Compile/ClsiManager.js +++ b/services/web/app/src/Features/Compile/ClsiManager.js @@ -692,7 +692,7 @@ async function _getContentFromMongo(projectId) { function _finaliseRequest(projectId, options, project, docs, files) { const resources = [] - let flags + let flags = [] let rootResourcePath = null let rootResourcePathOverride = null let hasMainFile = false @@ -771,6 +771,10 @@ function _finaliseRequest(projectId, options, project, docs, files) { flags = ['-file-line-error'] } + if (process.env.TEX_COMPILER_EXTRA_FLAGS) { + flags.push(...process.env.TEX_COMPILER_EXTRA_FLAGS.split(/\s+/).filter(Boolean)) + } + return { compile: { options: { diff --git a/services/web/app/src/Features/Documents/DocumentController.mjs b/services/web/app/src/Features/Documents/DocumentController.mjs index 6998c0b36a..9a16811894 100644 --- a/services/web/app/src/Features/Documents/DocumentController.mjs +++ b/services/web/app/src/Features/Documents/DocumentController.mjs @@ -7,6 +7,7 @@ import logger from '@overleaf/logger' import _ from 'lodash' import { plainTextResponse } from '../../infrastructure/Response.js' import { expressify } from '@overleaf/promise-utils' +import Modules from '../../infrastructure/Modules.js' async function getDocument(req, res) { const { Project_id: projectId, doc_id: docId } = req.params @@ -92,6 +93,9 @@ async function setDocument(req, res) { { docId, projectId }, 'finished receiving set document request from api (docupdater)' ) + + await Modules.promises.hooks.fire('docModified', projectId, docId) + res.json(result) } diff --git a/services/web/app/src/Features/Helpers/UrlHelper.js b/services/web/app/src/Features/Helpers/UrlHelper.js index cf686636b8..1c70e792f0 100644 --- a/services/web/app/src/Features/Helpers/UrlHelper.js +++ b/services/web/app/src/Features/Helpers/UrlHelper.js @@ -32,14 +32,6 @@ module.exports = { getCanonicalURL, getSafeRedirectPath, getSafeAdminDomainRedirect, - wrapUrlWithProxy(url) { - // TODO: Consider what to do for Community and Enterprise edition? - if (!Settings.apis.linkedUrlProxy.url) { - throw new Error('no linked url proxy configured') - } - return `${Settings.apis.linkedUrlProxy.url}?url=${encodeURIComponent(url)}` - }, - prependHttpIfNeeded(url) { if (!url.match('://')) { url = `http://${url}` diff --git a/services/web/app/src/Features/History/HistoryURLHelper.js b/services/web/app/src/Features/History/HistoryURLHelper.js index 8b8d8cbdd7..acb43ced68 100644 --- a/services/web/app/src/Features/History/HistoryURLHelper.js +++ b/services/web/app/src/Features/History/HistoryURLHelper.js @@ -8,7 +8,7 @@ function projectHistoryURLWithFilestoreFallback( ) { const filestoreURL = `${Settings.apis.filestore.url}/project/${projectId}/file/${fileRef._id}?from=${origin}` // TODO: When this file is converted to ES modules we will be able to use Features.hasFeature('project-history-blobs'). Currently we can't stub the feature return value in tests. 
- if (fileRef.hash && Settings.enableProjectHistoryBlobs) { + if (fileRef.hash && Settings.filestoreMigrationLevel >= 1) { return { url: `${Settings.apis.project_history.url}/project/${historyId}/blob/${fileRef.hash}`, fallbackURL: filestoreURL, diff --git a/services/web/app/src/Features/LinkedFiles/UrlAgent.mjs b/services/web/app/src/Features/LinkedFiles/UrlAgent.mjs index 12785d7c04..3c75034b34 100644 --- a/services/web/app/src/Features/LinkedFiles/UrlAgent.mjs +++ b/services/web/app/src/Features/LinkedFiles/UrlAgent.mjs @@ -72,7 +72,6 @@ function _getUrl(projectId, data, currentUserId) { if (!urlValidator.isWebUri(url)) { throw new InvalidUrlError(`invalid url: ${url}`) } - url = UrlHelper.wrapUrlWithProxy(url) return url } diff --git a/services/web/app/src/Features/PasswordReset/PasswordResetHandler.mjs b/services/web/app/src/Features/PasswordReset/PasswordResetHandler.mjs index 094f18b95f..2c1aefe6a6 100644 --- a/services/web/app/src/Features/PasswordReset/PasswordResetHandler.mjs +++ b/services/web/app/src/Features/PasswordReset/PasswordResetHandler.mjs @@ -72,6 +72,7 @@ async function getUserForPasswordResetToken(token) { 'overleaf.id': 1, email: 1, must_reconfirm: 1, + hashedPassword: 1, }) await assertUserPermissions(user, ['change-password']) diff --git a/services/web/app/src/Features/Project/ProjectController.js b/services/web/app/src/Features/Project/ProjectController.js index 6de47892bc..86e06699d1 100644 --- a/services/web/app/src/Features/Project/ProjectController.js +++ b/services/web/app/src/Features/Project/ProjectController.js @@ -590,7 +590,7 @@ const _ProjectController = { } const isAdminOrTemplateOwner = - hasAdminAccess(user) || Settings.templates?.user_id === userId + hasAdminAccess(user) || Settings.templates?.nonAdminCanManage const showTemplatesServerPro = Features.hasFeature('templates-server-pro') && isAdminOrTemplateOwner diff --git a/services/web/app/src/Features/Project/ProjectEditorHandler.js b/services/web/app/src/Features/Project/ProjectEditorHandler.js index 3d3d300e66..810fac7f83 100644 --- a/services/web/app/src/Features/Project/ProjectEditorHandler.js +++ b/services/web/app/src/Features/Project/ProjectEditorHandler.js @@ -4,7 +4,7 @@ const Path = require('path') const Features = require('../../infrastructure/Features') module.exports = ProjectEditorHandler = { - trackChangesAvailable: false, + trackChangesAvailable: true, buildProjectModelView( project, @@ -27,7 +27,7 @@ module.exports = ProjectEditorHandler = { deletedByExternalDataSource: project.deletedByExternalDataSource || false, imageName: project.imageName != null - ? Path.basename(project.imageName) + ? 
project.imageName : undefined, } diff --git a/services/web/app/src/Features/Project/ProjectOptionsHandler.js b/services/web/app/src/Features/Project/ProjectOptionsHandler.js index c0c11c396c..5d0001bcf4 100644 --- a/services/web/app/src/Features/Project/ProjectOptionsHandler.js +++ b/services/web/app/src/Features/Project/ProjectOptionsHandler.js @@ -24,7 +24,6 @@ const ProjectOptionsHandler = { if (!imageName || !Array.isArray(settings.allowedImageNames)) { return } - imageName = imageName.toLowerCase() const isAllowed = settings.allowedImageNames.find( allowed => imageName === allowed.imageName ) @@ -32,7 +31,7 @@ const ProjectOptionsHandler = { throw new Error(`invalid imageName: ${imageName}`) } const conditions = { _id: projectId } - const update = { imageName: settings.imageRoot + '/' + imageName } + const update = { imageName: imageName } return Project.updateOne(conditions, update, {}) }, diff --git a/services/web/app/src/Features/Templates/TemplatesController.js b/services/web/app/src/Features/Templates/TemplatesController.js index 39c4d50ae0..257de2b0c3 100644 --- a/services/web/app/src/Features/Templates/TemplatesController.js +++ b/services/web/app/src/Features/Templates/TemplatesController.js @@ -7,21 +7,22 @@ const { expressify } = require('@overleaf/promise-utils') const TemplatesController = { async getV1Template(req, res) { - const templateVersionId = req.params.Template_version_id - const templateId = req.query.id - if (!/^[0-9]+$/.test(templateVersionId) || !/^[0-9]+$/.test(templateId)) { - logger.err( - { templateVersionId, templateId }, - 'invalid template id or version' - ) - return res.sendStatus(400) - } + const templateId = req.params.Template_version_id + const templateVersionId = req.query.version +// if (!/^[0-9]+$/.test(templateVersionId) || !/^[0-9]+$/.test(templateId)) { +// logger.err( +// { templateVersionId, templateId }, +// 'invalid template id or version' +// ) +// return res.sendStatus(400) +// } const data = { templateVersionId, templateId, - name: req.query.templateName, - compiler: ProjectHelper.compilerFromV1Engine(req.query.latexEngine), - imageName: req.query.texImage, + name: req.query.name, + compiler: req.query.compiler, + language: req.query.language, + imageName: req.query.imageName, mainFile: req.query.mainFile, brandVariationId: req.query.brandVariationId, } @@ -36,6 +37,7 @@ const TemplatesController = { async createProjectFromV1Template(req, res) { const userId = SessionManager.getLoggedInUserId(req.session) + const project = await TemplatesManager.promises.createProjectFromV1Template( req.body.brandVariationId, req.body.compiler, @@ -44,7 +46,8 @@ const TemplatesController = { req.body.templateName, req.body.templateVersionId, userId, - req.body.imageName + req.body.imageName, + req.body.language ) delete req.session.templateData if (!project) { diff --git a/services/web/app/src/Features/Templates/TemplatesManager.js b/services/web/app/src/Features/Templates/TemplatesManager.js index 6a2b6207c1..77cde12ea9 100644 --- a/services/web/app/src/Features/Templates/TemplatesManager.js +++ b/services/web/app/src/Features/Templates/TemplatesManager.js @@ -18,6 +18,7 @@ const crypto = require('crypto') const Errors = require('../Errors/Errors') const { pipeline } = require('stream/promises') const ClsiCacheManager = require('../Compile/ClsiCacheManager') +const TIMEOUT = 30000 // 30 sec const TemplatesManager = { async createProjectFromV1Template( @@ -28,25 +29,19 @@ const TemplatesManager = { templateName, templateVersionId, userId, - imageName + 
imageName, + language ) { - const zipUrl = `${settings.apis.v1.url}/api/v1/overleaf/templates/${templateVersionId}` + const zipUrl = `${settings.apis.filestore.url}/template/${templateId}/v/${templateVersionId}/zip` const zipReq = await fetchStreamWithResponse(zipUrl, { - basicAuth: { - user: settings.apis.v1.user, - password: settings.apis.v1.pass, - }, - signal: AbortSignal.timeout(settings.apis.v1.timeout), + signal: AbortSignal.timeout(TIMEOUT), }) const projectName = ProjectDetailsHandler.fixProjectName(templateName) const dumpPath = `${settings.path.dumpFolder}/${crypto.randomUUID()}` const writeStream = fs.createWriteStream(dumpPath) try { - const attributes = { - fromV1TemplateId: templateId, - fromV1TemplateVersionId: templateVersionId, - } + const attributes = {} await pipeline(zipReq.stream, writeStream) if (zipReq.response.status !== 200) { @@ -78,14 +73,9 @@ const TemplatesManager = { await TemplatesManager._setCompiler(project._id, compiler) await TemplatesManager._setImage(project._id, imageName) await TemplatesManager._setMainFile(project._id, mainFile) + await TemplatesManager._setSpellCheckLanguage(project._id, language) await TemplatesManager._setBrandVariationId(project._id, brandVariationId) - const update = { - fromV1TemplateId: templateId, - fromV1TemplateVersionId: templateVersionId, - } - await Project.updateOne({ _id: project._id }, update, {}) - await prepareClsiCacheInBackground return project @@ -102,11 +92,12 @@ const TemplatesManager = { }, async _setImage(projectId, imageName) { - if (!imageName) { - imageName = 'wl_texlive:2018.1' + try { + await ProjectOptionsHandler.setImageName(projectId, imageName) + } catch { + logger.warn({ imageName: imageName }, 'not available') + await ProjectOptionsHandler.setImageName(projectId, settings.currentImageName) } - - await ProjectOptionsHandler.setImageName(projectId, imageName) }, async _setMainFile(projectId, mainFile) { @@ -116,6 +107,13 @@ const TemplatesManager = { await ProjectRootDocManager.setRootDocFromName(projectId, mainFile) }, + async _setSpellCheckLanguage(projectId, language) { + if (language == null) { + return + } + await ProjectOptionsHandler.setSpellCheckLanguage(projectId, language) + }, + async _setBrandVariationId(projectId, brandVariationId) { if (brandVariationId == null) { return diff --git a/services/web/app/src/Features/Uploads/ProjectUploadController.mjs b/services/web/app/src/Features/Uploads/ProjectUploadController.mjs index a3bc434ed7..84b8738af3 100644 --- a/services/web/app/src/Features/Uploads/ProjectUploadController.mjs +++ b/services/web/app/src/Features/Uploads/ProjectUploadController.mjs @@ -66,7 +66,7 @@ function uploadProject(req, res, next) { async function uploadFile(req, res, next) { const timer = new metrics.Timer('file-upload') const name = req.body.name - const path = req.file?.path + const { path } = req.file const projectId = req.params.Project_id const userId = SessionManager.getLoggedInUserId(req.session) let { folder_id: folderId } = req.query @@ -162,8 +162,14 @@ function multerMiddleware(req, res, next) { .status(422) .json({ success: false, error: req.i18n.translate('file_too_large') }) } - - return next(err) + if (err) return next(err) + if (!req.file?.path) { + logger.info({ req }, 'missing req.file.path on upload') + return res + .status(400) + .json({ success: false, error: 'invalid_upload_request' }) + } + next() }) } diff --git a/services/web/app/src/Features/User/UserController.js b/services/web/app/src/Features/User/UserController.js index 
b767dcd4a1..cabab8c891 100644 --- a/services/web/app/src/Features/User/UserController.js +++ b/services/web/app/src/Features/User/UserController.js @@ -518,4 +518,5 @@ module.exports = { expireDeletedUsersAfterDuration: expressify(expireDeletedUsersAfterDuration), ensureAffiliationMiddleware: expressify(ensureAffiliationMiddleware), ensureAffiliation, + doLogout, } diff --git a/services/web/app/src/Features/User/UserPagesController.mjs b/services/web/app/src/Features/User/UserPagesController.mjs index 8b5263c37d..c7ad5b30f4 100644 --- a/services/web/app/src/Features/User/UserPagesController.mjs +++ b/services/web/app/src/Features/User/UserPagesController.mjs @@ -52,10 +52,8 @@ async function settingsPage(req, res) { const reconfirmedViaSAML = _.get(req.session, ['saml', 'reconfirmed']) delete req.session.saml let shouldAllowEditingDetails = true - if (Settings.ldap && Settings.ldap.updateUserDetailsOnLogin) { - shouldAllowEditingDetails = false - } - if (Settings.saml && Settings.saml.updateUserDetailsOnLogin) { + const externalAuth = req.user.externalAuth + if (externalAuth && Settings[externalAuth].updateUserDetailsOnLogin) { shouldAllowEditingDetails = false } const oauthProviders = Settings.oauthProviders || {} diff --git a/services/web/app/src/infrastructure/ExpressLocals.js b/services/web/app/src/infrastructure/ExpressLocals.js index 5cf9501c29..6e45e82bc8 100644 --- a/services/web/app/src/infrastructure/ExpressLocals.js +++ b/services/web/app/src/infrastructure/ExpressLocals.js @@ -107,9 +107,9 @@ module.exports = function (webRouter, privateApiRouter, publicApiRouter) { webRouter.use(function (req, res, next) { req.externalAuthenticationSystemUsed = - Features.externalAuthenticationSystemUsed + () => !!req?.user?.externalAuth res.locals.externalAuthenticationSystemUsed = - Features.externalAuthenticationSystemUsed + () => !!req?.user?.externalAuth req.hasFeature = res.locals.hasFeature = Features.hasFeature next() }) @@ -434,7 +434,7 @@ module.exports = function (webRouter, privateApiRouter, publicApiRouter) { labsEnabled: Settings.labs && Settings.labs.enable, wikiEnabled: Settings.overleaf != null || Settings.proxyLearn, templatesEnabled: - Settings.overleaf != null || Settings.templates?.user_id != null, + Settings.overleaf != null || Boolean(Settings.templates), cioWriteKey: Settings.analytics?.cio?.writeKey, cioSiteId: Settings.analytics?.cio?.siteId, } diff --git a/services/web/app/src/infrastructure/Features.js b/services/web/app/src/infrastructure/Features.js index aaf51103b9..c26a768793 100644 --- a/services/web/app/src/infrastructure/Features.js +++ b/services/web/app/src/infrastructure/Features.js @@ -12,15 +12,12 @@ const trackChangesModuleAvailable = /** * @typedef {Object} Settings * @property {Object | undefined} apis - * @property {Object | undefined} apis.linkedUrlProxy - * @property {string | undefined} apis.linkedUrlProxy.url * @property {Object | undefined} apis.references * @property {string | undefined} apis.references.url * @property {boolean | undefined} enableGithubSync * @property {boolean | undefined} enableGitBridge * @property {boolean | undefined} enableHomepage - * @property {boolean | undefined} enableProjectHistoryBlobs - * @property {boolean | undefined} disableFilestore + * @property {number} filestoreMigrationLevel * @property {boolean | undefined} enableSaml * @property {boolean | undefined} ldap * @property {boolean | undefined} oauth @@ -30,6 +27,14 @@ const trackChangesModuleAvailable = */ const Features = { + validateSettings() { + if (![0, 
1, 2].includes(Settings.filestoreMigrationLevel)) { + throw new Error( + `invalid OVERLEAF_FILESTORE_MIGRATION_LEVEL=${Settings.filestoreMigrationLevel}, expected 0, 1 or 2` + ) + } + }, + /** * @returns {boolean} */ @@ -56,7 +61,7 @@ const Features = { case 'registration-page': return ( !Features.externalAuthenticationSystemUsed() || - Boolean(Settings.overleaf) + Boolean(Settings.overleaf) || Settings.oidc?.allowedOIDCEmailDomains ) case 'registration': return Boolean(Settings.overleaf) @@ -69,7 +74,7 @@ const Features = { case 'oauth': return Boolean(Settings.oauth) case 'templates-server-pro': - return Boolean(Settings.templates?.user_id) + return Boolean(Settings.templates) case 'affiliations': case 'analytics': return Boolean(_.get(Settings, ['apis', 'v1', 'url'])) @@ -85,13 +90,12 @@ const Features = { ) case 'link-url': return Boolean( - _.get(Settings, ['apis', 'linkedUrlProxy', 'url']) && - Settings.enabledLinkedFileTypes.includes('url') + Settings.enabledLinkedFileTypes.includes('url') ) case 'project-history-blobs': - return Boolean(Settings.enableProjectHistoryBlobs) + return Settings.filestoreMigrationLevel > 0 case 'filestore': - return Boolean(Settings.disableFilestore) === false + return Settings.filestoreMigrationLevel < 2 case 'support': return supportModuleAvailable case 'symbol-palette': diff --git a/services/web/app/src/infrastructure/Modules.js b/services/web/app/src/infrastructure/Modules.js index 20975a3642..aea3aeb087 100644 --- a/services/web/app/src/infrastructure/Modules.js +++ b/services/web/app/src/infrastructure/Modules.js @@ -150,8 +150,7 @@ async function linkedFileAgentsIncludes() { async function attachHooks() { for (const module of await modules()) { const { promises, ...hooks } = module.hooks || {} - for (const hook in promises || {}) { - const method = promises[hook] + for (const [hook, method] of Object.entries(promises || {})) { attachHook(hook, method) } for (const hook in hooks || {}) { diff --git a/services/web/app/src/router.mjs b/services/web/app/src/router.mjs index e727fa7bc5..c385be9b8e 100644 --- a/services/web/app/src/router.mjs +++ b/services/web/app/src/router.mjs @@ -217,6 +217,8 @@ async function initialize(webRouter, privateApiRouter, publicApiRouter) { CaptchaMiddleware.canSkipCaptcha ) + await Modules.applyRouter(webRouter, privateApiRouter, publicApiRouter) + webRouter.get('/login', UserPagesController.loginPage) AuthenticationController.addEndpointToLoginWhitelist('/login') @@ -262,6 +264,8 @@ async function initialize(webRouter, privateApiRouter, publicApiRouter) { '/read-only/one-time-login' ) + await Modules.applyRouter(webRouter, privateApiRouter, publicApiRouter) + webRouter.post('/logout', UserController.logout) webRouter.get('/restricted', AuthorizationMiddleware.restricted) @@ -285,8 +289,6 @@ async function initialize(webRouter, privateApiRouter, publicApiRouter) { TokenAccessRouter.apply(webRouter) HistoryRouter.apply(webRouter, privateApiRouter) - await Modules.applyRouter(webRouter, privateApiRouter, publicApiRouter) - if (Settings.enableSubscriptions) { webRouter.get( '/user/bonus', @@ -1271,6 +1273,10 @@ async function initialize(webRouter, privateApiRouter, publicApiRouter) { TokenAccessController.grantTokenAccessReadOnly ) + webRouter.get(['/learn*', '/blog*', '/latex*', '/for/*', '/contact*'], (req, res) => { + res.redirect(301, `https://www.overleaf.com${req.originalUrl}`) + }) + webRouter.get('/unsupported-browser', renderUnsupportedBrowserPage) webRouter.get('*', ErrorController.notFound) diff --git 
a/services/web/app/views/_cookie_banner.pug b/services/web/app/views/_cookie_banner.pug index 56974326cd..7cbc569bc1 100644 --- a/services/web/app/views/_cookie_banner.pug +++ b/services/web/app/views/_cookie_banner.pug @@ -1,13 +1,13 @@ -section.cookie-banner.hidden-print.hidden(aria-label='Cookie banner') - .cookie-banner-content We only use cookies for essential purposes and to improve your experience on our site. You can find out more in our cookie policy. +section.cookie-banner.hidden-print.hidden(aria-label=translate('cookie_banner')) + .cookie-banner-content !{translate('cookie_banner_info', {}, [{ name: 'a', attrs: { href: '/legal#Cookies' }}])} .cookie-banner-actions button( type='button' class='btn btn-link btn-sm' data-ol-cookie-banner-set-consent='essential' - ) Essential cookies only + ) #{translate('essential_cookies_only')} button( type='button' class='btn btn-primary btn-sm' data-ol-cookie-banner-set-consent='all' - ) Accept all cookies + ) #{translate('accept_all_cookies')} diff --git a/services/web/app/views/general/post-gateway.pug b/services/web/app/views/general/post-gateway.pug index c6bbc92d01..86f379ac1b 100644 --- a/services/web/app/views/general/post-gateway.pug +++ b/services/web/app/views/general/post-gateway.pug @@ -4,7 +4,7 @@ block vars - var suppressNavbar = true - var suppressFooter = true - var suppressSkipToContent = true - - var suppressCookieBanner = true + - var suppressPugCookieBanner = true block content .content.content-alt diff --git a/services/web/app/views/layout-marketing.pug b/services/web/app/views/layout-marketing.pug index b54c30f033..26e4eb539d 100644 --- a/services/web/app/views/layout-marketing.pug +++ b/services/web/app/views/layout-marketing.pug @@ -24,7 +24,7 @@ block body else include layout/fat-footer - if typeof suppressCookieBanner == 'undefined' + if typeof suppressPugCookieBanner == 'undefined' include _cookie_banner if bootstrapVersion === 5 diff --git a/services/web/app/views/layout-react.pug b/services/web/app/views/layout-react.pug index 94ff3ba247..e9c4c932c4 100644 --- a/services/web/app/views/layout-react.pug +++ b/services/web/app/views/layout-react.pug @@ -69,5 +69,5 @@ block body else include layout/fat-footer-react-bootstrap-5 - if typeof suppressCookieBanner === 'undefined' + if typeof suppressPugCookieBanner === 'undefined' include _cookie_banner diff --git a/services/web/app/views/layout-website-redesign.pug b/services/web/app/views/layout-website-redesign.pug index 61ed83043b..aa7fea9f07 100644 --- a/services/web/app/views/layout-website-redesign.pug +++ b/services/web/app/views/layout-website-redesign.pug @@ -27,7 +27,7 @@ block body else include layout/fat-footer-website-redesign - if typeof suppressCookieBanner == 'undefined' + if typeof suppressPugCookieBanner == 'undefined' include _cookie_banner block contactModal diff --git a/services/web/app/views/layout/navbar-marketing-bootstrap-5.pug b/services/web/app/views/layout/navbar-marketing-bootstrap-5.pug index 75cc065e73..65e95a2290 100644 --- a/services/web/app/views/layout/navbar-marketing-bootstrap-5.pug +++ b/services/web/app/views/layout/navbar-marketing-bootstrap-5.pug @@ -161,6 +161,18 @@ nav.navbar.navbar-default.navbar-main.navbar-expand-lg( event-segmentation={page: currentUrl, item: 'register', location: 'top-menu'} ) #{translate('sign_up')} + // templates link + if settings.templates + +nav-item + +nav-link( + href="/templates" + event-tracking="menu-click" + event-tracking-action="clicked" + event-tracking-trigger="click" + event-tracking-mb="true" 
+ event-segmentation={ page: currentUrl, item: 'templates', location: 'top-menu' } + ) #{translate('templates')} + // login link +nav-item +nav-link( diff --git a/services/web/app/views/layout/navbar-marketing.pug b/services/web/app/views/layout/navbar-marketing.pug index bb26ff8d40..64c3221c7b 100644 --- a/services/web/app/views/layout/navbar-marketing.pug +++ b/services/web/app/views/layout/navbar-marketing.pug @@ -159,6 +159,18 @@ nav.navbar.navbar-default.navbar-main( // logged out if !getSessionUser() + // templates link + if settings.templates + li + a( + href="/templates" + event-tracking="menu-click" + event-tracking-action="clicked" + event-tracking-trigger="click" + event-tracking-mb="true" + event-segmentation={ page: currentUrl, item: 'templates', location: 'top-menu' } + ) #{translate('templates')} + // register link if hasFeature('registration-page') li.primary diff --git a/services/web/app/views/project/editor/new_from_template.pug b/services/web/app/views/project/editor/new_from_template.pug index c84288a21a..6d35913141 100644 --- a/services/web/app/views/project/editor/new_from_template.pug +++ b/services/web/app/views/project/editor/new_from_template.pug @@ -2,7 +2,7 @@ extends ../../layout-marketing block vars - var suppressFooter = true - - var suppressCookieBanner = true + - var suppressPugCookieBanner = true - var suppressSkipToContent = true block content @@ -29,8 +29,10 @@ block content input(type="hidden" name="templateVersionId" value=templateVersionId) input(type="hidden" name="templateName" value=name) input(type="hidden" name="compiler" value=compiler) - input(type="hidden" name="imageName" value=imageName) + if imageName + input(type="hidden" name="imageName" value=imageName) input(type="hidden" name="mainFile" value=mainFile) + input(type="hidden" name="language" value=language) if brandVariationId input(type="hidden" name="brandVariationId" value=brandVariationId) input(hidden type="submit") diff --git a/services/web/app/views/project/ide-react-detached.pug b/services/web/app/views/project/ide-react-detached.pug index ca1a178bbf..fa695b1af5 100644 --- a/services/web/app/views/project/ide-react-detached.pug +++ b/services/web/app/views/project/ide-react-detached.pug @@ -7,7 +7,7 @@ block vars - var suppressNavbar = true - var suppressFooter = true - var suppressSkipToContent = true - - var suppressCookieBanner = true + - var suppressPugCookieBanner = true - metadata.robotsNoindexNofollow = true block content diff --git a/services/web/app/views/project/list-react.pug b/services/web/app/views/project/list-react.pug index 78103e75a6..47bff344b6 100644 --- a/services/web/app/views/project/list-react.pug +++ b/services/web/app/views/project/list-react.pug @@ -7,6 +7,7 @@ block vars - const suppressNavContentLinks = true - const suppressNavbar = true - const suppressFooter = true + - const suppressPugCookieBanner = true block append meta meta( diff --git a/services/web/app/views/project/token/access-react.pug b/services/web/app/views/project/token/access-react.pug index 80b91f1a99..6c01ad15b1 100644 --- a/services/web/app/views/project/token/access-react.pug +++ b/services/web/app/views/project/token/access-react.pug @@ -5,7 +5,7 @@ block entrypointVar block vars - var suppressFooter = true - - var suppressCookieBanner = true + - var suppressPugCookieBanner = true - var suppressSkipToContent = true block append meta diff --git a/services/web/app/views/project/token/sharing-updates.pug b/services/web/app/views/project/token/sharing-updates.pug index 
d1818be0af..2f67e5a3c1 100644 --- a/services/web/app/views/project/token/sharing-updates.pug +++ b/services/web/app/views/project/token/sharing-updates.pug @@ -5,7 +5,7 @@ block entrypointVar block vars - var suppressFooter = true - - var suppressCookieBanner = true + - var suppressPugCookieBanner = true - var suppressSkipToContent = true block append meta diff --git a/services/web/app/views/template_gallery/template-gallery.pug b/services/web/app/views/template_gallery/template-gallery.pug new file mode 100644 index 0000000000..3838d30606 --- /dev/null +++ b/services/web/app/views/template_gallery/template-gallery.pug @@ -0,0 +1,18 @@ +extends ../layout-react + +block entrypointVar + - entrypoint = 'pages/template-gallery' + +block vars +block vars + - const suppressNavContentLinks = true + - const suppressNavbar = true + - const suppressFooter = true + - bootstrap5PageStatus = 'enabled' // One of 'disabled', 'enabled', and 'queryStringOnly' + - isWebsiteRedesign = false + +block append meta + meta(name="ol-templateCategory" data-type="string" content=category) + +block content + #template-gallery-root diff --git a/services/web/app/views/template_gallery/template.pug b/services/web/app/views/template_gallery/template.pug new file mode 100644 index 0000000000..e56fd8d2e5 --- /dev/null +++ b/services/web/app/views/template_gallery/template.pug @@ -0,0 +1,20 @@ +extends ../layout-react + +block entrypointVar + - entrypoint = 'pages/template' + +block vars + - const suppressNavContentLinks = true + - const suppressNavbar = true + - const suppressFooter = true + - bootstrap5PageStatus = 'enabled' // One of 'disabled', 'enabled', and 'queryStringOnly' + - isWebsiteRedesign = false + +block append meta + meta(name="ol-template" data-type="json" content=template) + meta(name="ol-languages" data-type="json" content=languages) + meta(name="ol-userIsAdmin" data-type="boolean" content=hasAdminAccess()) + +block content + #template-root + diff --git a/services/web/app/views/user/login.pug b/services/web/app/views/user/login.pug index 03112a0e16..907e45dbb7 100644 --- a/services/web/app/views/user/login.pug +++ b/services/web/app/views/user/login.pug @@ -24,9 +24,9 @@ block content .form-group input.form-control( name='email' - type='email' + type=(settings.ldap && settings.ldap.enable) ? 'text' : 'email' required - placeholder='email@example.com' + placeholder=(settings.ldap && settings.ldap.enable) ? 
settings.ldap.placeholder : 'email@example.com' autofocus='true' ) .form-group @@ -44,3 +44,21 @@ block content if login_support_text hr p.text-center !{login_support_text} + if settings.saml && settings.saml.enable + .actions(style='margin-top: 30px;') + a.button.btn-secondary.btn( + href='/saml/login', + style="width: 100%;" + data-ol-disabled-inflight + ) + span(data-ol-inflight="idle") #{settings.saml.identityServiceName} + span(hidden data-ol-inflight="pending") #{translate("logging_in")}… + if settings.oidc && settings.oidc.enable + .actions(style='margin-top: 30px;') + a.button.btn-secondary.btn( + href='/oidc/login', + style="width: 100%;" + data-ol-disabled-inflight + ) + span(data-ol-inflight="idle") #{settings.oidc.identityServiceName} + span(hidden data-ol-inflight="pending") #{translate("logging_in")}… diff --git a/services/web/app/views/user/passwordReset.pug b/services/web/app/views/user/passwordReset.pug index f498baff23..b6cd42c840 100644 --- a/services/web/app/views/user/passwordReset.pug +++ b/services/web/app/views/user/passwordReset.pug @@ -50,7 +50,7 @@ block content +notification({ariaLive: 'assertive', type: 'error', className: 'mb-3', content: translate(error)}) div(data-ol-custom-form-message='no-password-allowed-due-to-sso' hidden) - +notification({ariaLive: 'polite', type: 'error', className: 'mb-3', content: translate('you_cant_reset_password_due_to_sso', {}, [{name: 'a', attrs: {href: '/sso-login'}}])}) + +notification({ariaLive: 'polite', type: 'error', className: 'mb-3', content: translate('you_cant_reset_password_due_to_ldap_or_sso')}) input(name='_csrf' type='hidden' value=csrfToken) .form-group.mb-3 label.form-label(for='email') #{translate("email")} diff --git a/services/web/config/settings.defaults.js b/services/web/config/settings.defaults.js index bd0730d5d0..ca501daf51 100644 --- a/services/web/config/settings.defaults.js +++ b/services/web/config/settings.defaults.js @@ -264,6 +264,9 @@ module.exports = { notifications: { url: `http://${process.env.NOTIFICATIONS_HOST || '127.0.0.1'}:3042`, }, + references: { + url: `http://${process.env.REFERENCES_HOST || '127.0.0.1'}:3056`, + }, webpack: { url: `http://${process.env.WEBPACK_HOST || '127.0.0.1'}:3808`, }, @@ -440,6 +443,9 @@ module.exports = { ',' ), + filestoreMigrationLevel: + parseInt(process.env.OVERLEAF_FILESTORE_MIGRATION_LEVEL, 10) || 0, + // i18n // ------ // @@ -979,7 +985,7 @@ module.exports = { pdfPreviewPromotions: [], diagnosticActions: [], sourceEditorCompletionSources: [], - sourceEditorSymbolPalette: [], + sourceEditorSymbolPalette: ['@/features/symbol-palette/components/symbol-palette'], sourceEditorToolbarComponents: [], mainEditorLayoutModals: [], langFeedbackLinkingWidgets: [], @@ -989,7 +995,7 @@ module.exports = { importProjectFromGithubModalWrapper: [], importProjectFromGithubMenu: [], editorLeftMenuSync: [], - editorLeftMenuManageTemplate: [], + editorLeftMenuManageTemplate: ['@/features/editor-left-menu/components/actions-manage-template'], oauth2Server: [], managedGroupSubscriptionEnrollmentNotification: [], managedGroupEnrollmentInvite: [], @@ -1030,6 +1036,13 @@ module.exports = { 'launchpad', 'server-ce-scripts', 'user-activate', + 'sandboxed-compiles', + 'symbol-palette', + 'track-changes', + 'authentication/ldap', + 'authentication/saml', + 'authentication/oidc', + 'template-gallery', ], viewIncludes: {}, @@ -1056,6 +1069,20 @@ module.exports = { managedUsers: { enabled: false, }, + + oauthProviders: { + ...(process.env.EXTERNAL_AUTH && 
process.env.EXTERNAL_AUTH.includes('oidc') && { + [process.env.OVERLEAF_OIDC_PROVIDER_ID || 'oidc']: { + name: process.env.OVERLEAF_OIDC_PROVIDER_NAME || 'OIDC Provider', + descriptionKey: process.env.OVERLEAF_OIDC_PROVIDER_DESCRIPTION, + descriptionOptions: { link: process.env.OVERLEAF_OIDC_PROVIDER_INFO_LINK }, + hideWhenNotLinked: process.env.OVERLEAF_OIDC_PROVIDER_HIDE_NOT_LINKED ? + process.env.OVERLEAF_OIDC_PROVIDER_HIDE_NOT_LINKED.toLowerCase() === 'true' : undefined, + linkPath: '/oidc/login', + }, + }), + }, + } module.exports.mergeWith = function (overrides) { diff --git a/services/web/docker-compose.ci.yml b/services/web/docker-compose.ci.yml index 33b5a3ca2e..8376103315 100644 --- a/services/web/docker-compose.ci.yml +++ b/services/web/docker-compose.ci.yml @@ -95,7 +95,7 @@ services: image: redis:7.4.3 mongo: - image: mongo:7.0.20 + image: mongo:8.0.11 logging: driver: none command: --replSet overleaf diff --git a/services/web/docker-compose.yml b/services/web/docker-compose.yml index 069c1e77de..e0a4a064c5 100644 --- a/services/web/docker-compose.yml +++ b/services/web/docker-compose.yml @@ -91,7 +91,7 @@ services: image: redis:7.4.3 mongo: - image: mongo:7.0.20 + image: mongo:8.0.11 command: --replSet overleaf volumes: - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js diff --git a/services/web/frontend/extracted-translations.json b/services/web/frontend/extracted-translations.json index ef2a9c6a2c..0d575ef916 100644 --- a/services/web/frontend/extracted-translations.json +++ b/services/web/frontend/extracted-translations.json @@ -26,6 +26,7 @@ "about_to_delete_cert": "", "about_to_delete_projects": "", "about_to_delete_tag": "", + "about_to_delete_template": "", "about_to_delete_the_following_project": "", "about_to_delete_the_following_projects": "", "about_to_delete_user_preamble": "", @@ -35,6 +36,7 @@ "about_to_remove_user_preamble": "", "about_to_trash_projects": "", "abstract": "", + "accept_all_cookies": "", "accept_and_continue": "", "accept_change": "", "accept_change_error_description": "", @@ -129,6 +131,7 @@ "all_premium_features_including": "", "all_projects": "", "all_projects_will_be_transferred_immediately": "", + "all_templates": "", "all_these_experiments_are_available_exclusively": "", "allows_to_search_by_author_title_etc_possible_to_pull_results_directly_from_your_reference_manager_if_connected": "", "an_email_has_already_been_sent_to": "", @@ -159,6 +162,7 @@ "ask_repo_owner_to_reconnect": "", "ask_repo_owner_to_renew_overleaf_subscription": "", "at_most_x_libraries_can_be_selected": "", + "author": "", "auto_close_brackets": "", "auto_compile": "", "auto_complete": "", @@ -222,6 +226,8 @@ "card_must_be_authenticated_by_3dsecure": "", "card_payment": "", "careers": "", + "categories": "", + "category": "", "category_arrows": "", "category_greek": "", "category_misc": "", @@ -332,6 +338,8 @@ "continue_to": "", "continue_using_free_features": "", "continue_with_free_plan": "", + "cookie_banner": "", + "cookie_banner_info": "", "copied": "", "copy": "", "copy_code": "", @@ -363,6 +371,7 @@ "customize_your_group_subscription": "", "customizing_figures": "", "customizing_tables": "", + "date": "", "date_and_owner": "", "dealing_with_errors": "", "decrease_indent": "", @@ -388,6 +397,7 @@ "delete_sso_config": "", "delete_table": "", "delete_tag": "", + "delete_template": "", "delete_token": "", "delete_user": "", "delete_your_account": "", @@ -488,6 +498,7 @@ "edit_figure": "", "edit_sso_configuration": "", 
"edit_tag": "", + "edit_template": "", "edit_your_custom_dictionary": "", "editing": "", "editing_captions": "", @@ -544,6 +555,7 @@ "error_opening_document_detail": "", "error_performing_request": "", "error_processing_file": "", + "essential_cookies_only": "", "example_project": "", "existing_plan_active_until_term_end": "", "expand": "", @@ -863,6 +875,7 @@ "invalid_password_too_similar": "", "invalid_regular_expression": "", "invalid_request": "", + "invalid_upload_request": "", "invite": "", "invite_expired": "", "invite_more_collabs": "", @@ -908,6 +921,7 @@ "last_name": "", "last_resort_trouble_shooting_guide": "", "last_suggested_fix": "", + "last_updated": "", "last_updated_date_by_x": "", "last_used": "", "latam_discount_modal_info": "", @@ -916,6 +930,8 @@ "latex_in_thirty_minutes": "", "latex_places_figures_according_to_a_special_algorithm": "", "latex_places_tables_according_to_a_special_algorithm": "", + "latex_templates": "", + "latex_templates_for_journal_articles": "", "layout": "", "layout_options": "", "layout_processing": "", @@ -938,7 +954,8 @@ "let_us_know_what_you_think": "", "lets_get_those_premium_features": "", "library": "", - "licenses": "", + "license": "", + "license_for_educational_purposes_confirmation": "", "limited_document_history": "", "limited_offer": "", "limited_to_n_collaborators_per_project": "", @@ -1125,6 +1142,7 @@ "no_selection_select_file": "", "no_symbols_found": "", "no_thanks_cancel_now": "", + "no_templates_found": "", "normal": "", "normally_x_price_per_month": "", "normally_x_price_per_year": "", @@ -1156,6 +1174,7 @@ "only_importer_can_refresh": "", "open_action_menu": "", "open_advanced_reference_search": "", + "open_as_template": "", "open_file": "", "open_link": "", "open_path": "", @@ -1179,6 +1198,7 @@ "overleaf_is_easy_to_use": "", "overleaf_labs": "", "overleaf_logo": "", + "overleaf_template_gallery": "", "overleafs_functionality_meets_my_needs": "", "overview": "", "overwrite": "", @@ -1245,6 +1265,7 @@ "please_change_primary_to_remove": "", "please_check_your_inbox_to_confirm": "", "please_compile_pdf_before_download": "", + "please_compile_pdf_before_publish_as_template": "", "please_compile_pdf_before_word_count": "", "please_confirm_primary_email_or_edit": "", "please_confirm_secondary_email_or_edit": "", @@ -1277,6 +1298,7 @@ "premium_feature": "", "premium_plan_label": "", "presentation_mode": "", + "prev": "", "previous_page": "", "price": "", "primarily_work_study_question": "", @@ -1738,6 +1760,7 @@ "tell_the_project_owner_and_ask_them_to_upgrade": "", "template": "", "template_description": "", + "template_gallery": "", "template_title_taken_from_project_title": "", "templates": "", "temporarily_hides_the_preview": "", @@ -2140,6 +2163,7 @@ "you_can_select_or_invite_collaborator": "", "you_can_select_or_invite_collaborator_plural": "", "you_can_still_use_your_premium_features": "", + "you_cant_add_or_change_password_due_to_ldap_or_sso": "", "you_cant_add_or_change_password_due_to_sso": "", "you_cant_join_this_group_subscription": "", "you_dont_have_any_add_ons_on_your_account": "", diff --git a/services/web/frontend/js/features/cookie-banner/index.js b/services/web/frontend/js/features/cookie-banner/index.js deleted file mode 100644 index 3d9b2b8d6c..0000000000 --- a/services/web/frontend/js/features/cookie-banner/index.js +++ /dev/null @@ -1,53 +0,0 @@ -import getMeta from '@/utils/meta' - -function loadGA() { - if (window.olLoadGA) { - window.olLoadGA() - } -} - -function setConsent(value) { - 
document.querySelector('.cookie-banner').classList.add('hidden') - const cookieDomain = getMeta('ol-ExposedSettings').cookieDomain - const oneYearInSeconds = 60 * 60 * 24 * 365 - const cookieAttributes = - '; path=/' + - '; domain=' + - cookieDomain + - '; max-age=' + - oneYearInSeconds + - '; SameSite=Lax; Secure' - if (value === 'all') { - document.cookie = 'oa=1' + cookieAttributes - loadGA() - window.dispatchEvent(new CustomEvent('cookie-consent', { detail: true })) - } else { - document.cookie = 'oa=0' + cookieAttributes - window.dispatchEvent(new CustomEvent('cookie-consent', { detail: false })) - } -} - -if ( - getMeta('ol-ExposedSettings').gaToken || - getMeta('ol-ExposedSettings').gaTokenV4 || - getMeta('ol-ExposedSettings').propensityId || - getMeta('ol-ExposedSettings').hotjarId -) { - document - .querySelectorAll('[data-ol-cookie-banner-set-consent]') - .forEach(el => { - el.addEventListener('click', function (e) { - e.preventDefault() - const consentType = el.getAttribute('data-ol-cookie-banner-set-consent') - setConsent(consentType) - }) - }) - - const oaCookie = document.cookie.split('; ').find(c => c.startsWith('oa=')) - if (!oaCookie) { - const cookieBannerEl = document.querySelector('.cookie-banner') - if (cookieBannerEl) { - cookieBannerEl.classList.remove('hidden') - } - } -} diff --git a/services/web/frontend/js/features/cookie-banner/index.ts b/services/web/frontend/js/features/cookie-banner/index.ts new file mode 100644 index 0000000000..2ea97e875a --- /dev/null +++ b/services/web/frontend/js/features/cookie-banner/index.ts @@ -0,0 +1,32 @@ +import { + CookieConsentValue, + cookieBannerRequired, + hasMadeCookieChoice, + setConsent, +} from '@/features/cookie-banner/utils' + +function toggleCookieBanner(hidden: boolean) { + const cookieBannerEl = document.querySelector('.cookie-banner') + if (cookieBannerEl) { + cookieBannerEl.classList.toggle('hidden', hidden) + } +} + +if (cookieBannerRequired()) { + document + .querySelectorAll('[data-ol-cookie-banner-set-consent]') + .forEach(el => { + el.addEventListener('click', function (e) { + e.preventDefault() + toggleCookieBanner(true) + const consentType = el.getAttribute( + 'data-ol-cookie-banner-set-consent' + ) as CookieConsentValue | null + setConsent(consentType) + }) + }) + + if (!hasMadeCookieChoice()) { + toggleCookieBanner(false) + } +} diff --git a/services/web/frontend/js/features/cookie-banner/utils.ts b/services/web/frontend/js/features/cookie-banner/utils.ts new file mode 100644 index 0000000000..5c045d4e71 --- /dev/null +++ b/services/web/frontend/js/features/cookie-banner/utils.ts @@ -0,0 +1,43 @@ +import getMeta from '@/utils/meta' + +export type CookieConsentValue = 'all' | 'essential' + +function loadGA() { + if (window.olLoadGA) { + window.olLoadGA() + } +} + +export function setConsent(value: CookieConsentValue | null) { + const cookieDomain = getMeta('ol-ExposedSettings').cookieDomain + const oneYearInSeconds = 60 * 60 * 24 * 365 + const cookieAttributes = + '; path=/' + + '; domain=' + + cookieDomain + + '; max-age=' + + oneYearInSeconds + + '; SameSite=Lax; Secure' + if (value === 'all') { + document.cookie = 'oa=1' + cookieAttributes + loadGA() + window.dispatchEvent(new CustomEvent('cookie-consent', { detail: true })) + } else { + document.cookie = 'oa=0' + cookieAttributes + window.dispatchEvent(new CustomEvent('cookie-consent', { detail: false })) + } +} + +export function cookieBannerRequired() { + const exposedSettings = getMeta('ol-ExposedSettings') + return Boolean( + exposedSettings.gaToken 
|| + exposedSettings.gaTokenV4 || + exposedSettings.propensityId || + exposedSettings.hotjarId + ) +} + +export function hasMadeCookieChoice() { + return document.cookie.split('; ').some(c => c.startsWith('oa=')) +} diff --git a/services/web/frontend/js/features/editor-left-menu/components/actions-manage-template.tsx b/services/web/frontend/js/features/editor-left-menu/components/actions-manage-template.tsx new file mode 100644 index 0000000000..f277111a91 --- /dev/null +++ b/services/web/frontend/js/features/editor-left-menu/components/actions-manage-template.tsx @@ -0,0 +1,72 @@ +import { useCallback, useState } from 'react' +import { useTranslation } from 'react-i18next' +import * as eventTracking from '../../../infrastructure/event-tracking' +import getMeta from '../../../utils/meta' +import OLTooltip from '@/features/ui/components/ol/ol-tooltip' +import { useDetachCompileContext } from '../../../shared/context/detach-compile-context' +import EditorManageTemplateModalWrapper from '../../template/components/manage-template-modal/editor-manage-template-modal-wrapper' +import LeftMenuButton from './left-menu-button' + +type TemplateManageResponse = { + template_id: string +} + +export default function ActionsManageTemplate() { + + const templatesAdmin = getMeta('ol-showTemplatesServerPro') + if (!templatesAdmin) { + return null + } + + const [showModal, setShowModal] = useState(false) + const { pdfFile } = useDetachCompileContext() + const { t } = useTranslation() + + const handleShowModal = useCallback(() => { + eventTracking.sendMB('left-menu-template') + setShowModal(true) + }, []) + + const openTemplate = useCallback( + ({ template_id: templateId }: TemplateManageResponse) => { + location.assign(`/template/${templateId}`) + }, + [location] + ) + + return ( + <> + {pdfFile ? ( + + {t('publish_as_template')} + + ) : ( + + {/* OverlayTrigger won't fire unless the child is a non-react html element (e.g div, span) */} +
+ + {t('publish_as_template')} + +
+
diff --git a/services/web/frontend/js/features/file-tree/components/file-tree-create/error-message.tsx b/services/web/frontend/js/features/file-tree/components/file-tree-create/error-message.tsx
index 02cc083928..244ef1a76b 100644
--- a/services/web/frontend/js/features/file-tree/components/file-tree-create/error-message.tsx
+++ b/services/web/frontend/js/features/file-tree/components/file-tree-create/error-message.tsx
@@ -1,4 +1,4 @@
-import { useTranslation } from 'react-i18next'
+import { useTranslation, Trans } from 'react-i18next'
 import { FetchError } from '../../../../infrastructure/fetch-json'
 import RedirectToLogin from './redirect-to-login'
 import {
@@ -7,6 +7,7 @@ import {
   InvalidFilenameError,
 } from '../../errors'
 import DangerMessage from './danger-message'
+import getMeta from '@/utils/meta'
 
 // TODO: Update the error type when we properly type FileTreeActionableContext
 export default function ErrorMessage({
@@ -15,6 +16,7 @@ export default function ErrorMessage({
   error: string | Record<string, any>
 }) {
   const { t } = useTranslation()
+  const { isOverleaf } = getMeta('ol-ExposedSettings')
   const fileNameLimit = 150
 
   // the error is a string
@@ -46,6 +48,22 @@ export default function ErrorMessage({
         </DangerMessage>
       )
 
+    case 'invalid_upload_request':
+      if (!isOverleaf) {
+        return (
+          <DangerMessage>{t('generic_something_went_wrong')}</DangerMessage>
+        )
+      }
+      return (
+        <DangerMessage>
+          <Trans
+            i18nKey="invalid_upload_request"
+            components={[
+              <a href="/contact" target="_blank" rel="noreferrer" />,
+            ]}
+          />
+        </DangerMessage>
+      )
+
     case 'duplicate_file_name':
       return (
diff --git a/services/web/frontend/js/features/ide-redesign/components/chat/message.tsx b/services/web/frontend/js/features/ide-redesign/components/chat/message.tsx
index 9a4ffe3a1b..6822db39da 100644
--- a/services/web/frontend/js/features/ide-redesign/components/chat/message.tsx
+++ b/services/web/frontend/js/features/ide-redesign/components/chat/message.tsx
@@ -1,15 +1,14 @@
 import { MessageProps } from '@/features/chat/components/message'
 import { User } from '../../../../../../types/user'
-import { getHueForUserId } from '@/shared/utils/colors'
+import {
+  getBackgroundColorForUserId,
+  hslStringToLuminance,
+} from '@/shared/utils/colors'
 import MessageContent from '@/features/chat/components/message-content'
 import classNames from 'classnames'
 import MaterialIcon from '@/shared/components/material-icon'
 import { t } from 'i18next'
 
-function hue(user?: User) {
-  return user ? getHueForUserId(user.id) : 0
-}
-
 function getAvatarStyle(user?: User) {
   if (!user?.id) {
     // Deleted user
@@ -20,9 +19,15 @@ function getAvatarStyle(user?: User) {
     }
   }
 
+  const backgroundColor = getBackgroundColorForUserId(user.id)
+
   return {
-    borderColor: `hsl(${hue(user)}, 85%, 40%)`,
-    backgroundColor: `hsl(${hue(user)}, 85%, 40%`,
+    borderColor: backgroundColor,
+    backgroundColor,
+    color:
+      hslStringToLuminance(backgroundColor) < 0.5
+        ? 'var(--content-primary-dark)'
+        : 'var(--content-primary)',
   }
 }
 
diff --git a/services/web/frontend/js/features/ide-redesign/components/online-users/online-users-widget.tsx b/services/web/frontend/js/features/ide-redesign/components/online-users/online-users-widget.tsx
index 07aaa647a9..2d30297e51 100644
--- a/services/web/frontend/js/features/ide-redesign/components/online-users/online-users-widget.tsx
+++ b/services/web/frontend/js/features/ide-redesign/components/online-users/online-users-widget.tsx
@@ -7,7 +7,11 @@ import {
   DropdownToggle,
 } from '@/features/ui/components/bootstrap-5/dropdown-menu'
 import OLTooltip from '@/features/ui/components/ol/ol-tooltip'
-import { getBackgroundColorForUserId } from '@/shared/utils/colors'
+import {
+  getBackgroundColorForUserId,
+  hslStringToLuminance,
+} from '@/shared/utils/colors'
+import classNames from 'classnames'
 import { useCallback, useMemo } from 'react'
 import { useTranslation } from 'react-i18next'
 
@@ -86,9 +90,16 @@ const OnlineUserWidget = ({
 
 const OnlineUserCircle = ({ user }: { user: OnlineUser }) => {
   const backgroundColor = getBackgroundColorForUserId(user.user_id)
+  const luminance = hslStringToLuminance(backgroundColor)
   const [character] = [...user.name]
   return (
-    <span className="online-user-circle" style={{ backgroundColor }}>
+    <span
+      className={classNames('online-user-circle', {
+        'online-user-circle-light-font': luminance < 0.5,
+        'online-user-circle-dark-font': luminance >= 0.5,
+      })}
+      style={{ backgroundColor }}
+    >
       {character}
     </span>
   )
diff --git a/services/web/frontend/js/features/project-list/components/project-list-ds-nav.tsx b/services/web/frontend/js/features/project-list/components/project-list-ds-nav.tsx
index 3d24f9845c..07319ffaf1 100644
--- a/services/web/frontend/js/features/project-list/components/project-list-ds-nav.tsx
+++ b/services/web/frontend/js/features/project-list/components/project-list-ds-nav.tsx
@@ -20,6 +20,7 @@ import Footer from '@/features/ui/components/bootstrap-5/footer/footer'
 import SidebarDsNav from '@/features/project-list/components/sidebar/sidebar-ds-nav'
 import SystemMessages from '@/shared/components/system-messages'
 import overleafLogo from '@/shared/svgs/overleaf-a-ds-solution-mallard.svg'
+import CookieBanner from '@/shared/components/cookie-banner'
 
 export function ProjectListDsNav() {
   const navbarProps = getMeta('ol-navbar')
@@ -125,6 +126,7 @@ export function ProjectListDsNav() {