Mirror of https://github.com/yu-i-i/overleaf-cep.git (synced 2025-07-25 02:00:07 +02:00)

Compare commits: ext-ce ... v5.5.0-ext (40 commits)
Commits in this compare:
8c027569b1, 5a46224b9d, bd947329dc, d3e5cf92c6, 3adbf60d70, 9fd3485d3e, f54cafb39f, 1de1914025, 1b107cc9ff, e178d934d5, 1eeda6fc7a, 72421492b4, 6609d893b1, 72073f6f51, de18677136, 0b60c26765, da2e5fc5b4, 2cc359c824, 157ad24cc6, 60cca48561, 4d3ef529d7, 8eaf6950c6, b407700a55, 400d800a1a, fd9c29ba09, 21b20cd86d, 0371e8d956, b993eda1f1, 8590e180d3, 5a6ed3334f, 320999b0d8, fcb5926db8, 04e0acbce5, 4df5135936, 504590d129, 6f8c951b7d, 6282e4b0eb, 928a514705, 884e7d81c8, 1c499496c6
1313 changed files with 34327 additions and 55113 deletions
@ -1,19 +1,10 @@
---
name: Bug report
about: Report a bug
title: ''
labels: type:bug
assignees: ''

---

<!--
Note: If you are using www.overleaf.com and have a problem,
or if you would like to request a new feature please contact
the support team at support@overleaf.com

This form should only be used to report bugs in the
Community Edition release of Overleaf.

-->
@ -42,7 +42,7 @@ To do this, use the included `bin/dev` script:

```
bin/dev
```

This will start all services using `node --watch`, which will automatically monitor the code and restart the services as necessary.
This will start all services using `nodemon`, which will automatically monitor the code and restart the services as necessary.

To improve performance, you can start only a subset of the services in development mode by providing a space-separated list to the `bin/dev` script:
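For example (a minimal sketch; the service names used here are assumptions, not confirmed by this diff):

```bash
# Start only the web front end and the compile service in watch mode.
# Service names are illustrative; check the bin/dev script for the exact
# names available in your checkout.
bin/dev web clsi
```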
@ -6,18 +6,15 @@ DOCUMENT_UPDATER_HOST=document-updater
FILESTORE_HOST=filestore
GRACEFUL_SHUTDOWN_DELAY_SECONDS=0
HISTORY_V1_HOST=history-v1
HISTORY_REDIS_HOST=redis
LISTEN_ADDRESS=0.0.0.0
MONGO_HOST=mongo
MONGO_URL=mongodb://mongo/sharelatex?directConnection=true
NOTIFICATIONS_HOST=notifications
PROJECT_HISTORY_HOST=project-history
QUEUES_REDIS_HOST=redis
REALTIME_HOST=real-time
REDIS_HOST=redis
REFERENCES_HOST=references
SESSION_SECRET=foo
V1_HISTORY_HOST=history-v1
WEBPACK_HOST=webpack
WEB_API_PASSWORD=overleaf
WEB_API_USER=overleaf
@ -124,7 +124,7 @@ services:
      - ../services/references/app.js:/overleaf/services/references/app.js

  web:
    command: ["node", "--watch", "app.mjs", "--watch-locales"]
    command: ["node", "--watch", "app.js", "--watch-locales"]
    environment:
      - NODE_OPTIONS=--inspect=0.0.0.0:9229
    ports:
@ -25,10 +25,10 @@ services:
    env_file:
      - dev.env
    environment:
      - DOCKER_RUNNER=true
      - TEXLIVE_IMAGE=texlive-full # docker build texlive -t texlive-full
      - SANDBOXED_COMPILES=true
      - SANDBOXED_COMPILES_HOST_DIR_COMPILES=${PWD}/compiles
      - SANDBOXED_COMPILES_HOST_DIR_OUTPUT=${PWD}/output
      - COMPILES_HOST_DIR=${PWD}/compiles
      - OUTPUT_HOST_DIR=${PWD}/output
    user: root
    volumes:
      - ${PWD}/compiles:/overleaf/services/clsi/compiles
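The `TEXLIVE_IMAGE` entry above references a locally built image. A rough sketch of preparing it before starting the stack (the build command is the one quoted in the comment above; the `mkdir` step is an assumption based on the bind mounts shown):

```bash
# Build the full TeX Live image referenced by TEXLIVE_IMAGE above.
docker build texlive -t texlive-full
# Create the host directories used by the sandboxed-compiles bind mounts (assumed).
mkdir -p compiles output
```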
BIN doc/logo.png (binary file not shown; before: 13 KiB, after: 71 KiB)
@ -1 +1 @@
22.17.0
22.15.1
@ -1,10 +1,10 @@
access-token-encryptor
--dependencies=None
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=
--esmock-loader=False
--is-library=True
--node-version=22.17.0
--node-version=22.15.1
--public-repo=False
--script-version=4.7.0
@ -1 +1 @@
|
|||
22.17.0
|
||||
22.15.1
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
fetch-utils
|
||||
--dependencies=None
|
||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
||||
--docker-repos=gcr.io/overleaf-ops
|
||||
--env-add=
|
||||
--env-pass-through=
|
||||
--esmock-loader=False
|
||||
--is-library=True
|
||||
--node-version=22.17.0
|
||||
--node-version=22.15.1
|
||||
--public-repo=False
|
||||
--script-version=4.7.0
|
||||
|
|
|
@ -1 +1 @@
|
|||
22.17.0
|
||||
22.15.1
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
logger
|
||||
--dependencies=None
|
||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
||||
--docker-repos=gcr.io/overleaf-ops
|
||||
--env-add=
|
||||
--env-pass-through=
|
||||
--esmock-loader=False
|
||||
--is-library=True
|
||||
--node-version=22.17.0
|
||||
--node-version=22.15.1
|
||||
--public-repo=False
|
||||
--script-version=4.7.0
|
||||
|
|
|
@ -1 +1 @@
|
|||
22.17.0
|
||||
22.15.1
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
metrics
|
||||
--dependencies=None
|
||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
||||
--docker-repos=gcr.io/overleaf-ops
|
||||
--env-add=
|
||||
--env-pass-through=
|
||||
--esmock-loader=False
|
||||
--is-library=True
|
||||
--node-version=22.17.0
|
||||
--node-version=22.15.1
|
||||
--public-repo=False
|
||||
--script-version=4.7.0
|
||||
|
|
|
@ -1 +1 @@
|
|||
22.17.0
|
||||
22.15.1
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
mongo-utils
|
||||
--dependencies=None
|
||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
||||
--docker-repos=gcr.io/overleaf-ops
|
||||
--env-add=
|
||||
--env-pass-through=
|
||||
--esmock-loader=False
|
||||
--is-library=True
|
||||
--node-version=22.17.0
|
||||
--node-version=22.15.1
|
||||
--public-repo=False
|
||||
--script-version=4.7.0
|
||||
|
|
|
@ -1 +1 @@
|
|||
22.17.0
|
||||
22.15.1
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
o-error
|
||||
--dependencies=None
|
||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
||||
--docker-repos=gcr.io/overleaf-ops
|
||||
--env-add=
|
||||
--env-pass-through=
|
||||
--esmock-loader=False
|
||||
--is-library=True
|
||||
--node-version=22.17.0
|
||||
--node-version=22.15.1
|
||||
--public-repo=False
|
||||
--script-version=4.7.0
|
||||
|
|
|
@ -1 +1 @@
|
|||
22.17.0
|
||||
22.15.1
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
object-persistor
|
||||
--dependencies=None
|
||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
||||
--docker-repos=gcr.io/overleaf-ops
|
||||
--env-add=
|
||||
--env-pass-through=
|
||||
--esmock-loader=False
|
||||
--is-library=True
|
||||
--node-version=22.17.0
|
||||
--node-version=22.15.1
|
||||
--public-repo=False
|
||||
--script-version=4.7.0
|
||||
|
|
|
@ -33,10 +33,6 @@ const AES256_KEY_LENGTH = 32
|
|||
* @property {() => Promise<Array<RootKeyEncryptionKey>>} getRootKeyEncryptionKeys
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {import('./types').ListDirectoryResult} ListDirectoryResult
|
||||
*/
|
||||
|
||||
/**
|
||||
* Helper function to make TS happy when accessing error properties
|
||||
* AWSError is not an actual class, so we cannot use instanceof.
|
||||
|
@ -395,9 +391,9 @@ class PerProjectEncryptedS3Persistor extends S3Persistor {
|
|||
* A general "cache" for project keys is another alternative. For now, use a helper class.
|
||||
*/
|
||||
class CachedPerProjectEncryptedS3Persistor {
|
||||
/** @type SSECOptions */
|
||||
/** @type SSECOptions */
|
||||
#projectKeyOptions
|
||||
/** @type PerProjectEncryptedS3Persistor */
|
||||
/** @type PerProjectEncryptedS3Persistor */
|
||||
#parent
|
||||
|
||||
/**
|
||||
|
@ -428,16 +424,6 @@ class CachedPerProjectEncryptedS3Persistor {
|
|||
return await this.#parent.getObjectSize(bucketName, path)
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} bucketName
|
||||
* @param {string} path
|
||||
* @return {Promise<ListDirectoryResult>}
|
||||
*/
|
||||
async listDirectory(bucketName, path) {
|
||||
return await this.#parent.listDirectory(bucketName, path)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} bucketName
|
||||
* @param {string} path
|
||||
|
|
|
@ -20,18 +20,6 @@ const { URL } = require('node:url')
|
|||
const { WriteError, ReadError, NotFoundError } = require('./Errors')
|
||||
const zlib = require('node:zlib')
|
||||
|
||||
/**
|
||||
* @typedef {import('aws-sdk/clients/s3').ListObjectsV2Output} ListObjectsV2Output
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {import('aws-sdk/clients/s3').Object} S3Object
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {import('./types').ListDirectoryResult} ListDirectoryResult
|
||||
*/
|
||||
|
||||
/**
|
||||
* Wrapper with private fields to avoid revealing them on console, JSON.stringify or similar.
|
||||
*/
|
||||
|
@ -278,12 +266,26 @@ class S3Persistor extends AbstractPersistor {
|
|||
* @return {Promise<void>}
|
||||
*/
|
||||
async deleteDirectory(bucketName, key, continuationToken) {
|
||||
const { contents, response } = await this.listDirectory(
|
||||
bucketName,
|
||||
key,
|
||||
continuationToken
|
||||
)
|
||||
const objects = contents.map(item => ({ Key: item.Key || '' }))
|
||||
let response
|
||||
const options = { Bucket: bucketName, Prefix: key }
|
||||
if (continuationToken) {
|
||||
options.ContinuationToken = continuationToken
|
||||
}
|
||||
|
||||
try {
|
||||
response = await this._getClientForBucket(bucketName)
|
||||
.listObjectsV2(options)
|
||||
.promise()
|
||||
} catch (err) {
|
||||
throw PersistorHelper.wrapError(
|
||||
err,
|
||||
'failed to list objects in S3',
|
||||
{ bucketName, key },
|
||||
ReadError
|
||||
)
|
||||
}
|
||||
|
||||
const objects = response.Contents?.map(item => ({ Key: item.Key || '' }))
|
||||
if (objects?.length) {
|
||||
try {
|
||||
await this._getClientForBucket(bucketName)
|
||||
|
@ -314,36 +316,6 @@ class S3Persistor extends AbstractPersistor {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} bucketName
|
||||
* @param {string} key
|
||||
* @param {string} [continuationToken]
|
||||
* @return {Promise<ListDirectoryResult>}
|
||||
*/
|
||||
async listDirectory(bucketName, key, continuationToken) {
|
||||
let response
|
||||
const options = { Bucket: bucketName, Prefix: key }
|
||||
if (continuationToken) {
|
||||
options.ContinuationToken = continuationToken
|
||||
}
|
||||
|
||||
try {
|
||||
response = await this._getClientForBucket(bucketName)
|
||||
.listObjectsV2(options)
|
||||
.promise()
|
||||
} catch (err) {
|
||||
throw PersistorHelper.wrapError(
|
||||
err,
|
||||
'failed to list objects in S3',
|
||||
{ bucketName, key },
|
||||
ReadError
|
||||
)
|
||||
}
|
||||
|
||||
return { contents: response.Contents ?? [], response }
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} bucketName
|
||||
* @param {string} key
|
||||
|
|
libraries/object-persistor/src/types.d.ts (vendored, 6 lines)

@ -1,6 +0,0 @@
import type { ListObjectsV2Output, Object } from 'aws-sdk/clients/s3'

export type ListDirectoryResult = {
  contents: Array<Object>
  response: ListObjectsV2Output
}
@ -1 +1 @@
|
|||
22.17.0
|
||||
22.15.1
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
overleaf-editor-core
|
||||
--dependencies=None
|
||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
||||
--docker-repos=gcr.io/overleaf-ops
|
||||
--env-add=
|
||||
--env-pass-through=
|
||||
--esmock-loader=False
|
||||
--is-library=True
|
||||
--node-version=22.17.0
|
||||
--node-version=22.15.1
|
||||
--public-repo=False
|
||||
--script-version=4.7.0
|
||||
|
|
|
@ -13,7 +13,7 @@ const V2DocVersions = require('./v2_doc_versions')
|
|||
|
||||
/**
|
||||
* @import Author from "./author"
|
||||
* @import { BlobStore, RawChange, ReadonlyBlobStore } from "./types"
|
||||
* @import { BlobStore, RawChange } from "./types"
|
||||
*/
|
||||
|
||||
/**
|
||||
|
@ -219,7 +219,7 @@ class Change {
|
|||
* If this Change contains any File objects, load them.
|
||||
*
|
||||
* @param {string} kind see {File#load}
|
||||
* @param {ReadonlyBlobStore} blobStore
|
||||
* @param {BlobStore} blobStore
|
||||
* @return {Promise<void>}
|
||||
*/
|
||||
async loadFiles(kind, blobStore) {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
// @ts-check
|
||||
|
||||
/**
|
||||
* @import { ClearTrackingPropsRawData, TrackingDirective } from '../types'
|
||||
* @import { ClearTrackingPropsRawData } from '../types'
|
||||
*/
|
||||
|
||||
class ClearTrackingProps {
|
||||
|
@ -11,27 +11,12 @@ class ClearTrackingProps {
|
|||
|
||||
/**
|
||||
* @param {any} other
|
||||
* @returns {other is ClearTrackingProps}
|
||||
* @returns {boolean}
|
||||
*/
|
||||
equals(other) {
|
||||
return other instanceof ClearTrackingProps
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {TrackingDirective} other
|
||||
* @returns {other is ClearTrackingProps}
|
||||
*/
|
||||
canMergeWith(other) {
|
||||
return other instanceof ClearTrackingProps
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {TrackingDirective} other
|
||||
*/
|
||||
mergeWith(other) {
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {ClearTrackingPropsRawData}
|
||||
*/
|
||||
|
|
|
@ -11,7 +11,7 @@ const EditOperation = require('../operation/edit_operation')
|
|||
const EditOperationBuilder = require('../operation/edit_operation_builder')
|
||||
|
||||
/**
|
||||
* @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawHashFileData, RawLazyStringFileData } from '../types'
|
||||
* @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawFileData, RawLazyStringFileData } from '../types'
|
||||
*/
|
||||
|
||||
class LazyStringFileData extends FileData {
|
||||
|
@ -159,11 +159,11 @@ class LazyStringFileData extends FileData {
|
|||
|
||||
/** @inheritdoc
|
||||
* @param {BlobStore} blobStore
|
||||
* @return {Promise<RawHashFileData>}
|
||||
* @return {Promise<RawFileData>}
|
||||
*/
|
||||
async store(blobStore) {
|
||||
if (this.operations.length === 0) {
|
||||
/** @type RawHashFileData */
|
||||
/** @type RawFileData */
|
||||
const raw = { hash: this.hash }
|
||||
if (this.rangesHash) {
|
||||
raw.rangesHash = this.rangesHash
|
||||
|
@ -171,11 +171,9 @@ class LazyStringFileData extends FileData {
|
|||
return raw
|
||||
}
|
||||
const eager = await this.toEager(blobStore)
|
||||
const raw = await eager.store(blobStore)
|
||||
this.hash = raw.hash
|
||||
this.rangesHash = raw.rangesHash
|
||||
this.operations.length = 0
|
||||
return raw
|
||||
/** @type RawFileData */
|
||||
return await eager.store(blobStore)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -8,7 +8,7 @@ const CommentList = require('./comment_list')
|
|||
const TrackedChangeList = require('./tracked_change_list')
|
||||
|
||||
/**
|
||||
* @import { StringFileRawData, RawHashFileData, BlobStore, CommentRawData } from "../types"
|
||||
* @import { StringFileRawData, RawFileData, BlobStore, CommentRawData } from "../types"
|
||||
* @import { TrackedChangeRawData, RangesBlob } from "../types"
|
||||
* @import EditOperation from "../operation/edit_operation"
|
||||
*/
|
||||
|
@ -139,7 +139,7 @@ class StringFileData extends FileData {
|
|||
/**
|
||||
* @inheritdoc
|
||||
* @param {BlobStore} blobStore
|
||||
* @return {Promise<RawHashFileData>}
|
||||
* @return {Promise<RawFileData>}
|
||||
*/
|
||||
async store(blobStore) {
|
||||
const blob = await blobStore.putString(this.content)
|
||||
|
|
|
@ -84,21 +84,6 @@ class TrackedChange {
|
|||
)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Return an equivalent tracked change whose extent is limited to the given
|
||||
* range
|
||||
*
|
||||
* @param {Range} range
|
||||
* @returns {TrackedChange | null} - the result or null if the intersection is empty
|
||||
*/
|
||||
intersectRange(range) {
|
||||
const intersection = this.range.intersect(range)
|
||||
if (intersection == null) {
|
||||
return null
|
||||
}
|
||||
return new TrackedChange(intersection, this.tracking)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = TrackedChange
|
||||
|
|
|
@ -2,11 +2,9 @@
|
|||
const Range = require('../range')
|
||||
const TrackedChange = require('./tracked_change')
|
||||
const TrackingProps = require('../file_data/tracking_props')
|
||||
const { InsertOp, RemoveOp, RetainOp } = require('../operation/scan_op')
|
||||
|
||||
/**
|
||||
* @import { TrackingDirective, TrackedChangeRawData } from "../types"
|
||||
* @import TextOperation from "../operation/text_operation"
|
||||
*/
|
||||
|
||||
class TrackedChangeList {
|
||||
|
@ -60,22 +58,6 @@ class TrackedChangeList {
|
|||
return this._trackedChanges.filter(change => range.contains(change.range))
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns tracked changes that overlap with the given range
|
||||
* @param {Range} range
|
||||
* @returns {TrackedChange[]}
|
||||
*/
|
||||
intersectRange(range) {
|
||||
const changes = []
|
||||
for (const change of this._trackedChanges) {
|
||||
const intersection = change.intersectRange(range)
|
||||
if (intersection != null) {
|
||||
changes.push(intersection)
|
||||
}
|
||||
}
|
||||
return changes
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the tracking props for a given range.
|
||||
* @param {Range} range
|
||||
|
@ -107,8 +89,6 @@ class TrackedChangeList {
|
|||
|
||||
/**
|
||||
* Collapses consecutive (and compatible) ranges
|
||||
*
|
||||
* @private
|
||||
* @returns {void}
|
||||
*/
|
||||
_mergeRanges() {
|
||||
|
@ -137,28 +117,12 @@ class TrackedChangeList {
|
|||
}
|
||||
|
||||
/**
|
||||
* Apply an insert operation
|
||||
*
|
||||
* @param {number} cursor
|
||||
* @param {string} insertedText
|
||||
* @param {{tracking?: TrackingProps}} opts
|
||||
*/
|
||||
applyInsert(cursor, insertedText, opts = {}) {
|
||||
this._applyInsert(cursor, insertedText, opts)
|
||||
this._mergeRanges()
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply an insert operation
|
||||
*
|
||||
* This method will not merge ranges at the end
|
||||
*
|
||||
* @private
|
||||
* @param {number} cursor
|
||||
* @param {string} insertedText
|
||||
* @param {{tracking?: TrackingProps}} [opts]
|
||||
*/
|
||||
_applyInsert(cursor, insertedText, opts = {}) {
|
||||
const newTrackedChanges = []
|
||||
for (const trackedChange of this._trackedChanges) {
|
||||
if (
|
||||
|
@ -207,29 +171,15 @@ class TrackedChangeList {
|
|||
newTrackedChanges.push(newTrackedChange)
|
||||
}
|
||||
this._trackedChanges = newTrackedChanges
|
||||
this._mergeRanges()
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a delete operation to the list of tracked changes
|
||||
*
|
||||
* @param {number} cursor
|
||||
* @param {number} length
|
||||
*/
|
||||
applyDelete(cursor, length) {
|
||||
this._applyDelete(cursor, length)
|
||||
this._mergeRanges()
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a delete operation to the list of tracked changes
|
||||
*
|
||||
* This method will not merge ranges at the end
|
||||
*
|
||||
* @private
|
||||
* @param {number} cursor
|
||||
* @param {number} length
|
||||
*/
|
||||
_applyDelete(cursor, length) {
|
||||
const newTrackedChanges = []
|
||||
for (const trackedChange of this._trackedChanges) {
|
||||
const deletedRange = new Range(cursor, length)
|
||||
|
@ -255,31 +205,15 @@ class TrackedChangeList {
|
|||
}
|
||||
}
|
||||
this._trackedChanges = newTrackedChanges
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a retain operation to the list of tracked changes
|
||||
*
|
||||
* @param {number} cursor
|
||||
* @param {number} length
|
||||
* @param {{tracking?: TrackingDirective}} [opts]
|
||||
*/
|
||||
applyRetain(cursor, length, opts = {}) {
|
||||
this._applyRetain(cursor, length, opts)
|
||||
this._mergeRanges()
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a retain operation to the list of tracked changes
|
||||
*
|
||||
* This method will not merge ranges at the end
|
||||
*
|
||||
* @private
|
||||
* @param {number} cursor
|
||||
* @param {number} length
|
||||
* @param {{tracking?: TrackingDirective}} opts
|
||||
*/
|
||||
_applyRetain(cursor, length, opts = {}) {
|
||||
applyRetain(cursor, length, opts = {}) {
|
||||
// If there's no tracking info, leave everything as-is
|
||||
if (!opts.tracking) {
|
||||
return
|
||||
|
@ -335,31 +269,6 @@ class TrackedChangeList {
|
|||
newTrackedChanges.push(newTrackedChange)
|
||||
}
|
||||
this._trackedChanges = newTrackedChanges
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a text operation to the list of tracked changes
|
||||
*
|
||||
* Ranges are merged only once at the end, for performance and to avoid
|
||||
* problematic edge cases where intermediate ranges get incorrectly merged.
|
||||
*
|
||||
* @param {TextOperation} operation
|
||||
*/
|
||||
applyTextOperation(operation) {
|
||||
// this cursor tracks the destination document that gets modified as
|
||||
// operations are applied to it.
|
||||
let cursor = 0
|
||||
for (const op of operation.ops) {
|
||||
if (op instanceof InsertOp) {
|
||||
this._applyInsert(cursor, op.insertion, { tracking: op.tracking })
|
||||
cursor += op.insertion.length
|
||||
} else if (op instanceof RemoveOp) {
|
||||
this._applyDelete(cursor, op.length)
|
||||
} else if (op instanceof RetainOp) {
|
||||
this._applyRetain(cursor, op.length, { tracking: op.tracking })
|
||||
cursor += op.length
|
||||
}
|
||||
}
|
||||
this._mergeRanges()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -62,35 +62,6 @@ class TrackingProps {
|
|||
this.ts.getTime() === other.ts.getTime()
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Are these tracking props compatible with the other tracking props for merging
|
||||
* ranges?
|
||||
*
|
||||
* @param {TrackingDirective} other
|
||||
* @returns {other is TrackingProps}
|
||||
*/
|
||||
canMergeWith(other) {
|
||||
if (!(other instanceof TrackingProps)) {
|
||||
return false
|
||||
}
|
||||
return this.type === other.type && this.userId === other.userId
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge two tracking props
|
||||
*
|
||||
* Assumes that `canMerge(other)` returns true
|
||||
*
|
||||
* @param {TrackingDirective} other
|
||||
*/
|
||||
mergeWith(other) {
|
||||
if (!this.canMergeWith(other)) {
|
||||
throw new Error('Cannot merge with incompatible tracking props')
|
||||
}
|
||||
const ts = this.ts <= other.ts ? this.ts : other.ts
|
||||
return new TrackingProps(this.type, this.userId, ts)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = TrackingProps
|
||||
|
|
|
@ -7,7 +7,7 @@ const Change = require('./change')
|
|||
const Snapshot = require('./snapshot')
|
||||
|
||||
/**
|
||||
* @import { BlobStore, ReadonlyBlobStore } from "./types"
|
||||
* @import { BlobStore } from "./types"
|
||||
*/
|
||||
|
||||
class History {
|
||||
|
@ -85,7 +85,7 @@ class History {
|
|||
* If this History contains any File objects, load them.
|
||||
*
|
||||
* @param {string} kind see {File#load}
|
||||
* @param {ReadonlyBlobStore} blobStore
|
||||
* @param {BlobStore} blobStore
|
||||
* @return {Promise<void>}
|
||||
*/
|
||||
async loadFiles(kind, blobStore) {
|
||||
|
|
|
@ -13,7 +13,7 @@ let EditFileOperation = null
|
|||
let SetFileMetadataOperation = null
|
||||
|
||||
/**
|
||||
* @import { ReadonlyBlobStore } from "../types"
|
||||
* @import { BlobStore } from "../types"
|
||||
* @import Snapshot from "../snapshot"
|
||||
*/
|
||||
|
||||
|
@ -80,7 +80,7 @@ class Operation {
|
|||
* If this operation references any files, load the files.
|
||||
*
|
||||
* @param {string} kind see {File#load}
|
||||
* @param {ReadOnlyBlobStore} blobStore
|
||||
* @param {BlobStore} blobStore
|
||||
* @return {Promise<void>}
|
||||
*/
|
||||
async loadFiles(kind, blobStore) {}
|
||||
|
|
|
@ -175,7 +175,7 @@ class InsertOp extends ScanOp {
|
|||
return false
|
||||
}
|
||||
if (this.tracking) {
|
||||
if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) {
|
||||
if (!this.tracking.equals(other.tracking)) {
|
||||
return false
|
||||
}
|
||||
} else if (other.tracking) {
|
||||
|
@ -198,10 +198,7 @@ class InsertOp extends ScanOp {
|
|||
throw new Error('Cannot merge with incompatible operation')
|
||||
}
|
||||
this.insertion += other.insertion
|
||||
if (this.tracking != null && other.tracking != null) {
|
||||
this.tracking = this.tracking.mergeWith(other.tracking)
|
||||
}
|
||||
// We already have the same commentIds
|
||||
// We already have the same tracking info and commentIds
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -309,13 +306,9 @@ class RetainOp extends ScanOp {
|
|||
return false
|
||||
}
|
||||
if (this.tracking) {
|
||||
if (!other.tracking || !this.tracking.canMergeWith(other.tracking)) {
|
||||
return false
|
||||
}
|
||||
} else if (other.tracking) {
|
||||
return false
|
||||
return this.tracking.equals(other.tracking)
|
||||
}
|
||||
return true
|
||||
return !other.tracking
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -326,9 +319,6 @@ class RetainOp extends ScanOp {
|
|||
throw new Error('Cannot merge with incompatible operation')
|
||||
}
|
||||
this.length += other.length
|
||||
if (this.tracking != null && other.tracking != null) {
|
||||
this.tracking = this.tracking.mergeWith(other.tracking)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -314,18 +314,25 @@ class TextOperation extends EditOperation {
|
|||
str
|
||||
)
|
||||
}
|
||||
file.trackedChanges.applyRetain(result.length, op.length, {
|
||||
tracking: op.tracking,
|
||||
})
|
||||
result += str.slice(inputCursor, inputCursor + op.length)
|
||||
inputCursor += op.length
|
||||
} else if (op instanceof InsertOp) {
|
||||
if (containsNonBmpChars(op.insertion)) {
|
||||
throw new InvalidInsertionError(str, op.toJSON())
|
||||
}
|
||||
file.trackedChanges.applyInsert(result.length, op.insertion, {
|
||||
tracking: op.tracking,
|
||||
})
|
||||
file.comments.applyInsert(
|
||||
new Range(result.length, op.insertion.length),
|
||||
{ commentIds: op.commentIds }
|
||||
)
|
||||
result += op.insertion
|
||||
} else if (op instanceof RemoveOp) {
|
||||
file.trackedChanges.applyDelete(result.length, op.length)
|
||||
file.comments.applyDelete(new Range(result.length, op.length))
|
||||
inputCursor += op.length
|
||||
} else {
|
||||
|
@ -345,8 +352,6 @@ class TextOperation extends EditOperation {
|
|||
throw new TextOperation.TooLongError(operation, result.length)
|
||||
}
|
||||
|
||||
file.trackedChanges.applyTextOperation(this)
|
||||
|
||||
file.content = result
|
||||
}
|
||||
|
||||
|
@ -395,36 +400,44 @@ class TextOperation extends EditOperation {
|
|||
for (let i = 0, l = ops.length; i < l; i++) {
|
||||
const op = ops[i]
|
||||
if (op instanceof RetainOp) {
|
||||
if (op.tracking) {
|
||||
// Where we need to end up after the retains
|
||||
const target = strIndex + op.length
|
||||
// A previous retain could have overriden some tracking info. Now we
|
||||
// need to restore it.
|
||||
const previousChanges = previousState.trackedChanges.intersectRange(
|
||||
new Range(strIndex, op.length)
|
||||
)
|
||||
// Where we need to end up after the retains
|
||||
const target = strIndex + op.length
|
||||
// A previous retain could have overriden some tracking info. Now we
|
||||
// need to restore it.
|
||||
const previousRanges = previousState.trackedChanges.inRange(
|
||||
new Range(strIndex, op.length)
|
||||
)
|
||||
|
||||
for (const change of previousChanges) {
|
||||
if (strIndex < change.range.start) {
|
||||
inverse.retain(change.range.start - strIndex, {
|
||||
tracking: new ClearTrackingProps(),
|
||||
})
|
||||
strIndex = change.range.start
|
||||
}
|
||||
inverse.retain(change.range.length, {
|
||||
tracking: change.tracking,
|
||||
let removeTrackingInfoIfNeeded
|
||||
if (op.tracking) {
|
||||
removeTrackingInfoIfNeeded = new ClearTrackingProps()
|
||||
}
|
||||
|
||||
for (const trackedChange of previousRanges) {
|
||||
if (strIndex < trackedChange.range.start) {
|
||||
inverse.retain(trackedChange.range.start - strIndex, {
|
||||
tracking: removeTrackingInfoIfNeeded,
|
||||
})
|
||||
strIndex += change.range.length
|
||||
strIndex = trackedChange.range.start
|
||||
}
|
||||
if (strIndex < target) {
|
||||
inverse.retain(target - strIndex, {
|
||||
tracking: new ClearTrackingProps(),
|
||||
if (trackedChange.range.end < strIndex + op.length) {
|
||||
inverse.retain(trackedChange.range.length, {
|
||||
tracking: trackedChange.tracking,
|
||||
})
|
||||
strIndex = target
|
||||
strIndex = trackedChange.range.end
|
||||
}
|
||||
} else {
|
||||
inverse.retain(op.length)
|
||||
strIndex += op.length
|
||||
if (trackedChange.range.end !== strIndex) {
|
||||
// No need to split the range at the end
|
||||
const [left] = trackedChange.range.splitAt(strIndex)
|
||||
inverse.retain(left.length, { tracking: trackedChange.tracking })
|
||||
strIndex = left.end
|
||||
}
|
||||
}
|
||||
if (strIndex < target) {
|
||||
inverse.retain(target - strIndex, {
|
||||
tracking: removeTrackingInfoIfNeeded,
|
||||
})
|
||||
strIndex = target
|
||||
}
|
||||
} else if (op instanceof InsertOp) {
|
||||
inverse.remove(op.insertion.length)
|
||||
|
|
|
@ -86,32 +86,10 @@ class Range {
|
|||
}
|
||||
|
||||
/**
|
||||
* Does this range overlap another range?
|
||||
*
|
||||
* Overlapping means that the two ranges have at least one character in common
|
||||
*
|
||||
* @param {Range} other - the other range
|
||||
* @param {Range} range
|
||||
*/
|
||||
overlaps(other) {
|
||||
return this.start < other.end && this.end > other.start
|
||||
}
|
||||
|
||||
/**
|
||||
* Does this range overlap the start of another range?
|
||||
*
|
||||
* @param {Range} other - the other range
|
||||
*/
|
||||
overlapsStart(other) {
|
||||
return this.start <= other.start && this.end > other.start
|
||||
}
|
||||
|
||||
/**
|
||||
* Does this range overlap the end of another range?
|
||||
*
|
||||
* @param {Range} other - the other range
|
||||
*/
|
||||
overlapsEnd(other) {
|
||||
return this.start < other.end && this.end >= other.end
|
||||
overlaps(range) {
|
||||
return this.start < range.end && this.end > range.start
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -249,26 +227,6 @@ class Range {
|
|||
)
|
||||
return [rangeUpToCursor, rangeAfterCursor]
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the intersection of this range with another range
|
||||
*
|
||||
* @param {Range} other - the other range
|
||||
* @return {Range | null} the intersection or null if the intersection is empty
|
||||
*/
|
||||
intersect(other) {
|
||||
if (this.contains(other)) {
|
||||
return other
|
||||
} else if (other.contains(this)) {
|
||||
return this
|
||||
} else if (other.overlapsStart(this)) {
|
||||
return new Range(this.pos, other.end - this.start)
|
||||
} else if (other.overlapsEnd(this)) {
|
||||
return new Range(other.pos, this.end - other.start)
|
||||
} else {
|
||||
return null
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Range
|
||||
|
|
|
@ -193,13 +193,4 @@ describe('LazyStringFileData', function () {
|
|||
expect(fileData.getStringLength()).to.equal(longString.length)
|
||||
expect(fileData.getOperations()).to.have.length(1)
|
||||
})
|
||||
|
||||
it('truncates its operations after being stored', async function () {
|
||||
const testHash = File.EMPTY_FILE_HASH
|
||||
const fileData = new LazyStringFileData(testHash, undefined, 0)
|
||||
fileData.edit(new TextOperation().insert('abc'))
|
||||
const stored = await fileData.store(this.blobStore)
|
||||
expect(fileData.hash).to.equal(stored.hash)
|
||||
expect(fileData.operations).to.deep.equal([])
|
||||
})
|
||||
})
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
// @ts-check
|
||||
'use strict'
|
||||
|
||||
const { expect } = require('chai')
|
||||
|
@ -448,44 +449,4 @@ describe('Range', function () {
|
|||
expect(() => range.insertAt(16, 3)).to.throw()
|
||||
})
|
||||
})
|
||||
|
||||
describe('intersect', function () {
|
||||
it('should handle partially overlapping ranges', function () {
|
||||
const range1 = new Range(5, 10)
|
||||
const range2 = new Range(3, 6)
|
||||
const intersection1 = range1.intersect(range2)
|
||||
expect(intersection1.pos).to.equal(5)
|
||||
expect(intersection1.length).to.equal(4)
|
||||
const intersection2 = range2.intersect(range1)
|
||||
expect(intersection2.pos).to.equal(5)
|
||||
expect(intersection2.length).to.equal(4)
|
||||
})
|
||||
|
||||
it('should intersect with itself', function () {
|
||||
const range = new Range(5, 10)
|
||||
const intersection = range.intersect(range)
|
||||
expect(intersection.pos).to.equal(5)
|
||||
expect(intersection.length).to.equal(10)
|
||||
})
|
||||
|
||||
it('should handle nested ranges', function () {
|
||||
const range1 = new Range(5, 10)
|
||||
const range2 = new Range(7, 2)
|
||||
const intersection1 = range1.intersect(range2)
|
||||
expect(intersection1.pos).to.equal(7)
|
||||
expect(intersection1.length).to.equal(2)
|
||||
const intersection2 = range2.intersect(range1)
|
||||
expect(intersection2.pos).to.equal(7)
|
||||
expect(intersection2.length).to.equal(2)
|
||||
})
|
||||
|
||||
it('should handle disconnected ranges', function () {
|
||||
const range1 = new Range(5, 10)
|
||||
const range2 = new Range(20, 30)
|
||||
const intersection1 = range1.intersect(range2)
|
||||
expect(intersection1).to.be.null
|
||||
const intersection2 = range2.intersect(range1)
|
||||
expect(intersection2).to.be.null
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
@ -107,7 +107,7 @@ describe('RetainOp', function () {
|
|||
expect(op1.equals(new RetainOp(3))).to.be.true
|
||||
})
|
||||
|
||||
it('cannot merge with another RetainOp if the tracking user is different', function () {
|
||||
it('cannot merge with another RetainOp if tracking info is different', function () {
|
||||
const op1 = new RetainOp(
|
||||
4,
|
||||
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
|
||||
|
@ -120,14 +120,14 @@ describe('RetainOp', function () {
|
|||
expect(() => op1.mergeWith(op2)).to.throw(Error)
|
||||
})
|
||||
|
||||
it('can merge with another RetainOp if the tracking user is the same', function () {
|
||||
it('can merge with another RetainOp if tracking info is the same', function () {
|
||||
const op1 = new RetainOp(
|
||||
4,
|
||||
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
|
||||
)
|
||||
const op2 = new RetainOp(
|
||||
4,
|
||||
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:01.000Z'))
|
||||
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
|
||||
)
|
||||
op1.mergeWith(op2)
|
||||
expect(
|
||||
|
@ -310,7 +310,7 @@ describe('InsertOp', function () {
|
|||
expect(() => op1.mergeWith(op2)).to.throw(Error)
|
||||
})
|
||||
|
||||
it('cannot merge with another InsertOp if tracking user is different', function () {
|
||||
it('cannot merge with another InsertOp if tracking info is different', function () {
|
||||
const op1 = new InsertOp(
|
||||
'a',
|
||||
new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
|
||||
|
@ -323,7 +323,7 @@ describe('InsertOp', function () {
|
|||
expect(() => op1.mergeWith(op2)).to.throw(Error)
|
||||
})
|
||||
|
||||
it('can merge with another InsertOp if tracking user and comment info is the same', function () {
|
||||
it('can merge with another InsertOp if tracking and comment info is the same', function () {
|
||||
const op1 = new InsertOp(
|
||||
'a',
|
||||
new TrackingProps(
|
||||
|
@ -338,7 +338,7 @@ describe('InsertOp', function () {
|
|||
new TrackingProps(
|
||||
'insert',
|
||||
'user1',
|
||||
new Date('2024-01-01T00:00:01.000Z')
|
||||
new Date('2024-01-01T00:00:00.000Z')
|
||||
),
|
||||
['1', '2']
|
||||
)
|
||||
|
|
|
@ -322,47 +322,6 @@ describe('TextOperation', function () {
|
|||
new TextOperation().retain(4).remove(4).retain(3)
|
||||
)
|
||||
})
|
||||
|
||||
it('undoing a tracked delete restores the tracked changes', function () {
|
||||
expectInverseToLeadToInitialState(
|
||||
new StringFileData(
|
||||
'the quick brown fox jumps over the lazy dog',
|
||||
undefined,
|
||||
[
|
||||
{
|
||||
range: { pos: 5, length: 5 },
|
||||
tracking: {
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'insert',
|
||||
userId: 'user1',
|
||||
},
|
||||
},
|
||||
{
|
||||
range: { pos: 12, length: 3 },
|
||||
tracking: {
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'delete',
|
||||
userId: 'user1',
|
||||
},
|
||||
},
|
||||
{
|
||||
range: { pos: 18, length: 5 },
|
||||
tracking: {
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'insert',
|
||||
userId: 'user1',
|
||||
},
|
||||
},
|
||||
]
|
||||
),
|
||||
new TextOperation()
|
||||
.retain(7)
|
||||
.retain(13, {
|
||||
tracking: new TrackingProps('delete', 'user1', new Date()),
|
||||
})
|
||||
.retain(23)
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('compose', function () {
|
||||
|
|
|
@ -1 +1 @@
|
|||
22.17.0
|
||||
22.15.1
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
promise-utils
|
||||
--dependencies=None
|
||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
||||
--docker-repos=gcr.io/overleaf-ops
|
||||
--env-add=
|
||||
--env-pass-through=
|
||||
--esmock-loader=False
|
||||
--is-library=True
|
||||
--node-version=22.17.0
|
||||
--node-version=22.15.1
|
||||
--public-repo=False
|
||||
--script-version=4.7.0
|
||||
|
|
|
@ -1 +1 @@
|
|||
22.17.0
|
||||
22.15.1
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
ranges-tracker
|
||||
--dependencies=None
|
||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
||||
--docker-repos=gcr.io/overleaf-ops
|
||||
--env-add=
|
||||
--env-pass-through=
|
||||
--esmock-loader=False
|
||||
--is-library=True
|
||||
--node-version=22.17.0
|
||||
--node-version=22.15.1
|
||||
--public-repo=False
|
||||
--script-version=4.7.0
|
||||
|
|
|
@ -1 +1 @@
|
|||
22.17.0
|
||||
22.15.1
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
redis-wrapper
|
||||
--dependencies=None
|
||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
||||
--docker-repos=gcr.io/overleaf-ops
|
||||
--env-add=
|
||||
--env-pass-through=
|
||||
--esmock-loader=False
|
||||
--is-library=True
|
||||
--node-version=22.17.0
|
||||
--node-version=22.15.1
|
||||
--public-repo=False
|
||||
--script-version=4.7.0
|
||||
|
|
|
@ -1 +1 @@
|
|||
22.17.0
|
||||
22.15.1
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
settings
|
||||
--dependencies=None
|
||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
||||
--docker-repos=gcr.io/overleaf-ops
|
||||
--env-add=
|
||||
--env-pass-through=
|
||||
--esmock-loader=False
|
||||
--is-library=True
|
||||
--node-version=22.17.0
|
||||
--node-version=22.15.1
|
||||
--public-repo=False
|
||||
--script-version=4.7.0
|
||||
|
|
|
@ -1 +1 @@
|
|||
22.17.0
|
||||
22.15.1
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
stream-utils
|
||||
--dependencies=None
|
||||
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
|
||||
--docker-repos=gcr.io/overleaf-ops
|
||||
--env-add=
|
||||
--env-pass-through=
|
||||
--esmock-loader=False
|
||||
--is-library=True
|
||||
--node-version=22.17.0
|
||||
--node-version=22.15.1
|
||||
--public-repo=False
|
||||
--script-version=4.7.0
|
||||
|
|
|
@ -145,24 +145,6 @@ class LoggerStream extends Transform {
|
|||
}
|
||||
}
|
||||
|
||||
class MeteredStream extends Transform {
|
||||
#Metrics
|
||||
#metric
|
||||
#labels
|
||||
|
||||
constructor(Metrics, metric, labels) {
|
||||
super()
|
||||
this.#Metrics = Metrics
|
||||
this.#metric = metric
|
||||
this.#labels = labels
|
||||
}
|
||||
|
||||
_transform(chunk, encoding, callback) {
|
||||
this.#Metrics.count(this.#metric, chunk.byteLength, 1, this.#labels)
|
||||
callback(null, chunk)
|
||||
}
|
||||
}
|
||||
|
||||
// Export our classes
|
||||
|
||||
module.exports = {
|
||||
|
@ -171,7 +153,6 @@ module.exports = {
|
|||
LoggerStream,
|
||||
LimitedStream,
|
||||
TimeoutStream,
|
||||
MeteredStream,
|
||||
SizeExceededError,
|
||||
AbortError,
|
||||
}
|
||||
|
|
package-lock.json (generated, 4824 lines changed): file diff suppressed because it is too large.

package.json (32 lines changed)
@ -8,8 +8,8 @@
|
|||
"@types/chai": "^4.3.0",
|
||||
"@types/chai-as-promised": "^7.1.8",
|
||||
"@types/mocha": "^10.0.6",
|
||||
"@typescript-eslint/eslint-plugin": "^8.30.1",
|
||||
"@typescript-eslint/parser": "^8.30.1",
|
||||
"@typescript-eslint/eslint-plugin": "^8.0.0",
|
||||
"@typescript-eslint/parser": "^8.0.0",
|
||||
"eslint": "^8.15.0",
|
||||
"eslint-config-prettier": "^8.5.0",
|
||||
"eslint-config-standard": "^17.0.0",
|
||||
|
@ -18,24 +18,28 @@
|
|||
"eslint-plugin-cypress": "^2.15.1",
|
||||
"eslint-plugin-import": "^2.26.0",
|
||||
"eslint-plugin-mocha": "^10.1.0",
|
||||
"eslint-plugin-n": "^15.7.0",
|
||||
"eslint-plugin-node": "^11.1.0",
|
||||
"eslint-plugin-prettier": "^4.0.0",
|
||||
"eslint-plugin-promise": "^6.0.0",
|
||||
"eslint-plugin-unicorn": "^56.0.0",
|
||||
"prettier": "3.6.2",
|
||||
"typescript": "^5.8.3"
|
||||
},
|
||||
"engines": {
|
||||
"npm": "11.4.2"
|
||||
"prettier": "3.3.3",
|
||||
"typescript": "^5.5.4"
|
||||
},
|
||||
"overrides": {
|
||||
"swagger-tools@0.10.4": {
|
||||
"path-to-regexp": "3.3.0",
|
||||
"body-parser": "1.20.3",
|
||||
"multer": "2.0.1"
|
||||
"cross-env": {
|
||||
"cross-spawn": "^7.0.6"
|
||||
},
|
||||
"request@2.88.2": {
|
||||
"tough-cookie": "5.1.2"
|
||||
"fetch-mock": {
|
||||
"path-to-regexp": "3.3.0"
|
||||
},
|
||||
"google-gax": {
|
||||
"protobufjs": "^7.2.5"
|
||||
},
|
||||
"swagger-tools": {
|
||||
"body-parser": "1.20.3",
|
||||
"multer": "2.0.0",
|
||||
"path-to-regexp": "3.3.0",
|
||||
"qs": "6.13.0"
|
||||
}
|
||||
},
|
||||
"scripts": {
|
||||
|
|
|
@ -115,3 +115,9 @@ ENV LOG_LEVEL="info"
|
|||
EXPOSE 80
|
||||
|
||||
ENTRYPOINT ["/sbin/my_init"]
|
||||
|
||||
# Store the revision
|
||||
# ------------------
|
||||
# This should be the last step to optimize docker image caching.
|
||||
ARG MONOREPO_REVISION
|
||||
RUN echo "monorepo-server-ce,$MONOREPO_REVISION" > /var/www/revisions.txt
|
||||
|
|
|
@ -24,7 +24,6 @@ build-base:
|
|||
--cache-from $(OVERLEAF_BASE_BRANCH) \
|
||||
--tag $(OVERLEAF_BASE_TAG) \
|
||||
--tag $(OVERLEAF_BASE_BRANCH) \
|
||||
--network=host \
|
||||
$(MONOREPO_ROOT)
|
||||
|
||||
|
||||
|
@ -34,13 +33,12 @@ build-community:
|
|||
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
||||
--progress=plain \
|
||||
--build-arg OVERLEAF_BASE_TAG \
|
||||
--label "com.overleaf.ce.revision=$(MONOREPO_REVISION)" \
|
||||
--build-arg MONOREPO_REVISION \
|
||||
--cache-from $(OVERLEAF_LATEST) \
|
||||
--cache-from $(OVERLEAF_BRANCH) \
|
||||
--file Dockerfile \
|
||||
--tag $(OVERLEAF_TAG) \
|
||||
--tag $(OVERLEAF_BRANCH) \
|
||||
--network=host \
|
||||
$(MONOREPO_ROOT)
|
||||
|
||||
SHELLCHECK_OPTS = \
|
||||
|
|
|
@ -184,10 +184,7 @@ const settings = {
  siteUrl: (siteUrl = process.env.OVERLEAF_SITE_URL || 'http://localhost'),

  // Status page URL as displayed on the maintenance/500 pages.
  statusPageUrl: process.env.OVERLEAF_STATUS_PAGE_URL ?
    // Add https:// protocol prefix if not set (Allow plain-text http:// for Server Pro/CE).
    (process.env.OVERLEAF_STATUS_PAGE_URL.startsWith('http://') || process.env.OVERLEAF_STATUS_PAGE_URL.startsWith('https://')) ? process.env.OVERLEAF_STATUS_PAGE_URL : `https://${process.env.OVERLEAF_STATUS_PAGE_URL}`
    : undefined,
  statusPageUrl: process.env.OVERLEAF_STATUS_PAGE_URL,

  // The name this is used to describe your Overleaf Community Edition Installation
  appName: process.env.OVERLEAF_APP_NAME || 'Overleaf Community Edition',
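A hedged usage sketch of the prefixing behaviour shown above (the variable name comes from the diff; the hostname value is illustrative only):

```bash
# A bare hostname now gets an https:// prefix, while explicit http:// or
# https:// values are passed through unchanged.
export OVERLEAF_STATUS_PAGE_URL=status.example.com   # served as https://status.example.com
```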
@ -1,6 +1,6 @@
#!/usr/bin/env bash

set -eu
set -eux

echo "-------------------------"
echo "Deactivating old projects"
@ -1,6 +1,6 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -eu
|
||||
set -eux
|
||||
|
||||
echo "-------------------------"
|
||||
echo "Expiring deleted projects"
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -eu
|
||||
set -eux
|
||||
|
||||
echo "----------------------"
|
||||
echo "Expiring deleted users"
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -eu
|
||||
set -eux
|
||||
|
||||
echo "---------------------------------"
|
||||
echo "Flush all project-history changes"
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -eu
|
||||
set -eux
|
||||
|
||||
echo "--------------------------"
|
||||
echo "Flush project-history queue"
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -eu
|
||||
set -eux
|
||||
|
||||
echo "-----------------------------------"
|
||||
echo "Retry project-history errors (hard)"
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -eu
|
||||
set -eux
|
||||
|
||||
echo "-----------------------------------"
|
||||
echo "Retry project-history errors (soft)"
|
||||
|
|
|
@ -1,28 +0,0 @@
FROM sharelatex/sharelatex:5.5.0

# fix tls configuration in redis for history-v1
COPY pr_25168.patch .
RUN patch -p1 < pr_25168.patch && rm pr_25168.patch

# improve logging in history system
COPY pr_26086.patch .
RUN patch -p1 < pr_26086.patch && rm pr_26086.patch

# fix create-user.mjs script
COPY pr_26152.patch .
RUN patch -p1 < pr_26152.patch && rm pr_26152.patch

# check mongo featureCompatibilityVersion
COPY pr_26091.patch .
RUN patch -p1 < pr_26091.patch && rm pr_26091.patch

# update multer and tar-fs
RUN sed -i 's/"multer": "2.0.0"/"multer": "2.0.1"/g' package.json
RUN sed -i 's/"dockerode": "^4.0.5"/"dockerode": "^4.0.7"/g' services/clsi/package.json
RUN sed -i 's/"tar-fs": "^3.0.4"/"tar-fs": "^3.0.9"/g' services/clsi/package.json
RUN sed -i 's/199c5ff05bd375c508f4074498237baead7f5148/4dbceda355efc3fc8ac3cf5c66c3778c8a6fdb23/g' services/web/package.json
COPY package-lock.json.diff .
RUN patch package-lock.json < package-lock.json.diff
RUN npm install --omit=dev
RUN npm install @paralleldrive/cuid2@2.2.2 -w services/history-v1
File diff suppressed because it is too large.
@ -1,19 +0,0 @@
|
|||
--- a/services/history-v1/config/custom-environment-variables.json
|
||||
+++ b/services/history-v1/config/custom-environment-variables.json
|
||||
@@ -50,12 +50,14 @@
|
||||
"history": {
|
||||
"host": "OVERLEAF_REDIS_HOST",
|
||||
"password": "OVERLEAF_REDIS_PASS",
|
||||
- "port": "OVERLEAF_REDIS_PORT"
|
||||
+ "port": "OVERLEAF_REDIS_PORT",
|
||||
+ "tls": "OVERLEAF_REDIS_TLS"
|
||||
},
|
||||
"lock": {
|
||||
"host": "OVERLEAF_REDIS_HOST",
|
||||
"password": "OVERLEAF_REDIS_PASS",
|
||||
- "port": "OVERLEAF_REDIS_PORT"
|
||||
+ "port": "OVERLEAF_REDIS_PORT",
|
||||
+ "tls": "OVERLEAF_REDIS_TLS"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,200 +0,0 @@
|
|||
--- a/services/history-v1/api/controllers/project_import.js
|
||||
+++ b/services/history-v1/api/controllers/project_import.js
|
||||
@@ -35,6 +35,7 @@ async function importSnapshot(req, res) {
|
||||
try {
|
||||
snapshot = Snapshot.fromRaw(rawSnapshot)
|
||||
} catch (err) {
|
||||
+ logger.warn({ err, projectId }, 'failed to import snapshot')
|
||||
return render.unprocessableEntity(res)
|
||||
}
|
||||
|
||||
@@ -43,6 +44,7 @@ async function importSnapshot(req, res) {
|
||||
historyId = await chunkStore.initializeProject(projectId, snapshot)
|
||||
} catch (err) {
|
||||
if (err instanceof chunkStore.AlreadyInitialized) {
|
||||
+ logger.warn({ err, projectId }, 'already initialized')
|
||||
return render.conflict(res)
|
||||
} else {
|
||||
throw err
|
||||
--- a/services/history-v1/api/controllers/projects.js
|
||||
+++ b/services/history-v1/api/controllers/projects.js
|
||||
@@ -34,6 +34,7 @@ async function initializeProject(req, res, next) {
|
||||
res.status(HTTPStatus.OK).json({ projectId })
|
||||
} catch (err) {
|
||||
if (err instanceof chunkStore.AlreadyInitialized) {
|
||||
+ logger.warn({ err, projectId }, 'failed to initialize')
|
||||
render.conflict(res)
|
||||
} else {
|
||||
throw err
|
||||
@@ -242,11 +243,15 @@ async function createProjectBlob(req, res, next) {
|
||||
const sizeLimit = new StreamSizeLimit(maxUploadSize)
|
||||
await pipeline(req, sizeLimit, fs.createWriteStream(tmpPath))
|
||||
if (sizeLimit.sizeLimitExceeded) {
|
||||
+ logger.warn(
|
||||
+ { projectId, expectedHash, maxUploadSize },
|
||||
+ 'blob exceeds size threshold'
|
||||
+ )
|
||||
return render.requestEntityTooLarge(res)
|
||||
}
|
||||
const hash = await blobHash.fromFile(tmpPath)
|
||||
if (hash !== expectedHash) {
|
||||
- logger.debug({ hash, expectedHash }, 'Hash mismatch')
|
||||
+ logger.warn({ projectId, hash, expectedHash }, 'Hash mismatch')
|
||||
return render.conflict(res, 'File hash mismatch')
|
||||
}
|
||||
|
||||
@@ -343,6 +348,10 @@ async function copyProjectBlob(req, res, next) {
|
||||
targetBlobStore.getBlob(blobHash),
|
||||
])
|
||||
if (!sourceBlob) {
|
||||
+ logger.warn(
|
||||
+ { sourceProjectId, targetProjectId, blobHash },
|
||||
+ 'missing source blob when copying across projects'
|
||||
+ )
|
||||
return render.notFound(res)
|
||||
}
|
||||
// Exit early if the blob exists in the target project.
|
||||
--- a/services/history-v1/app.js
|
||||
+++ b/services/history-v1/app.js
|
||||
@@ -100,11 +100,13 @@ function setupErrorHandling() {
|
||||
})
|
||||
}
|
||||
if (err.code === 'ENUM_MISMATCH') {
|
||||
+ logger.warn({ err, projectId }, err.message)
|
||||
return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({
|
||||
message: 'invalid enum value: ' + err.paramName,
|
||||
})
|
||||
}
|
||||
if (err.code === 'REQUIRED') {
|
||||
+ logger.warn({ err, projectId }, err.message)
|
||||
return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({
|
||||
message: err.message,
|
||||
})
|
||||
--- a/services/project-history/app/js/HistoryStoreManager.js
|
||||
+++ b/services/project-history/app/js/HistoryStoreManager.js
|
||||
@@ -35,7 +35,10 @@ class StringStream extends stream.Readable {
|
||||
_mocks.getMostRecentChunk = (projectId, historyId, callback) => {
|
||||
const path = `projects/${historyId}/latest/history`
|
||||
logger.debug({ projectId, historyId }, 'getting chunk from history service')
|
||||
- _requestChunk({ path, json: true }, callback)
|
||||
+ _requestChunk({ path, json: true }, (err, chunk) => {
|
||||
+ if (err) return callback(OError.tag(err))
|
||||
+ callback(null, chunk)
|
||||
+ })
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -54,7 +57,10 @@ export function getChunkAtVersion(projectId, historyId, version, callback) {
|
||||
{ projectId, historyId, version },
|
||||
'getting chunk from history service for version'
|
||||
)
|
||||
- _requestChunk({ path, json: true }, callback)
|
||||
+ _requestChunk({ path, json: true }, (err, chunk) => {
|
||||
+ if (err) return callback(OError.tag(err))
|
||||
+ callback(null, chunk)
|
||||
+ })
|
||||
}
|
||||
|
||||
export function getMostRecentVersion(projectId, historyId, callback) {
|
||||
@@ -68,8 +74,10 @@ export function getMostRecentVersion(projectId, historyId, callback) {
|
||||
_.sortBy(chunk.chunk.history.changes || [], x => x.timestamp)
|
||||
)
|
||||
// find the latest project and doc versions in the chunk
|
||||
- _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) =>
|
||||
+ _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => {
|
||||
+ if (err1) err1 = OError.tag(err1)
|
||||
_getLatestV2DocVersions(projectId, chunk, (err2, v2DocVersions) => {
|
||||
+ if (err2) err2 = OError.tag(err2)
|
||||
// return the project and doc versions
|
||||
const projectStructureAndDocVersions = {
|
||||
project: projectVersion,
|
||||
@@ -83,7 +91,7 @@ export function getMostRecentVersion(projectId, historyId, callback) {
|
||||
chunk
|
||||
)
|
||||
})
|
||||
- )
|
||||
+ })
|
||||
})
|
||||
}
|
||||
|
||||
@@ -211,7 +219,10 @@ export function getProjectBlob(historyId, blobHash, callback) {
|
||||
logger.debug({ historyId, blobHash }, 'getting blob from history service')
|
||||
_requestHistoryService(
|
||||
{ path: `projects/${historyId}/blobs/${blobHash}` },
|
||||
- callback
|
||||
+ (err, blob) => {
|
||||
+ if (err) return callback(OError.tag(err))
|
||||
+ callback(null, blob)
|
||||
+ }
|
||||
)
|
||||
}
|
||||
|
||||
@@ -277,7 +288,10 @@ function createBlobFromString(historyId, data, fileId, callback) {
|
||||
(fsPath, cb) => {
|
||||
_createBlob(historyId, fsPath, cb)
|
||||
},
|
||||
- callback
|
||||
+ (err, hash) => {
|
||||
+ if (err) return callback(OError.tag(err))
|
||||
+ callback(null, hash)
|
||||
+ }
|
||||
)
|
||||
}
|
||||
|
||||
@@ -330,7 +344,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
|
||||
try {
|
||||
ranges = HistoryBlobTranslator.createRangeBlobDataFromUpdate(update)
|
||||
} catch (error) {
|
||||
- return callback(error)
|
||||
+ return callback(OError.tag(error))
|
||||
}
|
||||
createBlobFromString(
|
||||
historyId,
|
||||
@@ -338,7 +352,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
|
||||
`project-${projectId}-doc-${update.doc}`,
|
||||
(err, fileHash) => {
|
||||
if (err) {
|
||||
- return callback(err)
|
||||
+ return callback(OError.tag(err))
|
||||
}
|
||||
if (ranges) {
|
||||
createBlobFromString(
|
||||
@@ -347,7 +361,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
|
||||
`project-${projectId}-doc-${update.doc}-ranges`,
|
||||
(err, rangesHash) => {
|
||||
if (err) {
|
||||
- return callback(err)
|
||||
+ return callback(OError.tag(err))
|
||||
}
|
||||
logger.debug(
|
||||
{ fileHash, rangesHash },
|
||||
@@ -415,7 +429,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
|
||||
},
|
||||
(err, fileHash) => {
|
||||
if (err) {
|
||||
- return callback(err)
|
||||
+ return callback(OError.tag(err))
|
||||
}
|
||||
if (update.hash && update.hash !== fileHash) {
|
||||
logger.warn(
|
||||
@@ -447,7 +461,7 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
|
||||
},
|
||||
(err, fileHash) => {
|
||||
if (err) {
|
||||
- return callback(err)
|
||||
+ return callback(OError.tag(err))
|
||||
}
|
||||
logger.debug({ fileHash }, 'created empty blob for file')
|
||||
callback(null, { file: fileHash })
|
||||
@@ -520,7 +534,10 @@ export function initializeProject(historyId, callback) {
|
||||
export function deleteProject(projectId, callback) {
|
||||
_requestHistoryService(
|
||||
{ method: 'DELETE', path: `projects/${projectId}` },
|
||||
- callback
|
||||
+ err => {
|
||||
+ if (err) return callback(OError.tag(err))
|
||||
+ callback(null)
|
||||
+ }
|
||||
)
|
||||
}
|
||||
|
|
@@ -1,60 +0,0 @@
--- a/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
+++ b/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
@@ -7,6 +7,7 @@ import {
 const { ObjectId } = mongodb

 const MIN_MONGO_VERSION = [6, 0]
+const MIN_MONGO_FEATURE_COMPATIBILITY_VERSION = [6, 0]

 async function main() {
   let mongoClient
@@ -18,6 +19,7 @@ async function main() {
   }

   await checkMongoVersion(mongoClient)
+  await checkFeatureCompatibilityVersion(mongoClient)

   try {
     await testTransactions(mongoClient)
@@ -53,6 +55,41 @@ async function checkMongoVersion(mongoClient) {
   }
 }

+async function checkFeatureCompatibilityVersion(mongoClient) {
+  const {
+    featureCompatibilityVersion: { version },
+  } = await mongoClient
+    .db()
+    .admin()
+    .command({ getParameter: 1, featureCompatibilityVersion: 1 })
+  const [major, minor] = version.split('.').map(v => parseInt(v))
+  const [minMajor, minMinor] = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION
+
+  if (major < minMajor || (major === minMajor && minor < minMinor)) {
+    const minVersion = MIN_MONGO_FEATURE_COMPATIBILITY_VERSION.join('.')
+    console.error(`
+The MongoDB server has featureCompatibilityVersion=${version}, but Overleaf requires at least version ${minVersion}.
+
+Open a mongo shell:
+- Overleaf Toolkit deployments: $ bin/mongo
+- Legacy docker-compose.yml deployments: $ docker exec -it mongo mongosh localhost/sharelatex
+
+In the mongo shell:
+> db.adminCommand( { setFeatureCompatibilityVersion: "${minMajor}.${minMinor}" } )
+
+Verify the new value:
+> db.adminCommand( { getParameter: 1, featureCompatibilityVersion: 1 } )
+  ...
+  {
+    featureCompatibilityVersion: { version: '${minMajor}.${minMinor}' },
+...
+
+Aborting.
+`)
+    process.exit(1)
+  }
+}
+
 main()
   .then(() => {
     console.error('Mongodb is up.')
@@ -1,16 +0,0 @@
--- a/services/web/modules/server-ce-scripts/scripts/create-user.mjs
+++ b/services/web/modules/server-ce-scripts/scripts/create-user.mjs
@@ -48,3 +48,13 @@ Please visit the following URL to set a password for ${email} and log in:
     )
   })
 }
+
+if (filename === process.argv[1]) {
+  try {
+    await main()
+    process.exit(0)
+  } catch (error) {
+    console.error({ error })
+    process.exit(1)
+  }
+}
@@ -1,27 +0,0 @@
FROM sharelatex/sharelatex:5.5.1

# https://github.com/overleaf/internal/pull/25944
# Removed changes to services/web/frontend/js/features/ide-redesign/components/rail.tsx due to incompatibility with 5.5.1
COPY pr_25944.patch .
RUN patch -p1 < pr_25944.patch && rm pr_25944.patch

# https://github.com/overleaf/internal/pull/26637
# Removed changes to server-ce/test/create-and-compile-project.spec.ts and server-ce/test/helpers/compile.ts due to incompatibility with 5.5.1
COPY pr_26637.patch .
RUN patch -p1 < pr_26637.patch && rm pr_26637.patch

# https://github.com/overleaf/internal/pull/26783
COPY pr_26783.patch .
RUN patch -p1 < pr_26783.patch && rm pr_26783.patch

# https://github.com/overleaf/internal/pull/26697
COPY pr_26697.patch .
RUN patch -p1 < pr_26697.patch && rm pr_26697.patch

# Apply security updates to base image
RUN apt update && apt install -y linux-libc-dev \
  && unattended-upgrade --verbose --no-minimal-upgrade-steps \
  && rm -rf /var/lib/apt/lists/*

# Recompile frontend assets
RUN node genScript compile | bash
@ -1,219 +0,0 @@
|
|||
diff --git a/services/web/frontend/js/features/review-panel-new/context/review-panel-providers.tsx b/services/web/frontend/js/features/review-panel-new/context/review-panel-providers.tsx
|
||||
index 20e157dfee9..ad943772d0d 100644
|
||||
--- a/services/web/frontend/js/features/review-panel-new/context/review-panel-providers.tsx
|
||||
+++ b/services/web/frontend/js/features/review-panel-new/context/review-panel-providers.tsx
|
||||
@@ -4,10 +4,16 @@ import { ChangesUsersProvider } from './changes-users-context'
|
||||
import { TrackChangesStateProvider } from './track-changes-state-context'
|
||||
import { ThreadsProvider } from './threads-context'
|
||||
import { ReviewPanelViewProvider } from './review-panel-view-context'
|
||||
+import { useProjectContext } from '@/shared/context/project-context'
|
||||
|
||||
export const ReviewPanelProviders: FC<React.PropsWithChildren> = ({
|
||||
children,
|
||||
}) => {
|
||||
+ const { features } = useProjectContext()
|
||||
+ if (!features.trackChangesVisible) {
|
||||
+ return children
|
||||
+ }
|
||||
+
|
||||
return (
|
||||
<ReviewPanelViewProvider>
|
||||
<ChangesUsersProvider>
|
||||
diff --git a/services/web/frontend/js/features/share-project-modal/components/add-collaborators.tsx b/services/web/frontend/js/features/share-project-modal/components/add-collaborators.tsx
|
||||
index 8606fb11fad..e80fb037116 100644
|
||||
--- a/services/web/frontend/js/features/share-project-modal/components/add-collaborators.tsx
|
||||
+++ b/services/web/frontend/js/features/share-project-modal/components/add-collaborators.tsx
|
||||
@@ -176,24 +176,34 @@ export default function AddCollaborators({ readOnly }: { readOnly?: boolean }) {
|
||||
])
|
||||
|
||||
const privilegeOptions = useMemo(() => {
|
||||
- return [
|
||||
+ const options: {
|
||||
+ key: string
|
||||
+ label: string
|
||||
+ description?: string | null
|
||||
+ }[] = [
|
||||
{
|
||||
key: 'readAndWrite',
|
||||
label: t('editor'),
|
||||
},
|
||||
- {
|
||||
+ ]
|
||||
+
|
||||
+ if (features.trackChangesVisible) {
|
||||
+ options.push({
|
||||
key: 'review',
|
||||
label: t('reviewer'),
|
||||
description: !features.trackChanges
|
||||
? t('comment_only_upgrade_for_track_changes')
|
||||
: null,
|
||||
- },
|
||||
- {
|
||||
- key: 'readOnly',
|
||||
- label: t('viewer'),
|
||||
- },
|
||||
- ]
|
||||
- }, [features.trackChanges, t])
|
||||
+ })
|
||||
+ }
|
||||
+
|
||||
+ options.push({
|
||||
+ key: 'readOnly',
|
||||
+ label: t('viewer'),
|
||||
+ })
|
||||
+
|
||||
+ return options
|
||||
+ }, [features.trackChanges, features.trackChangesVisible, t])
|
||||
|
||||
return (
|
||||
<OLForm className="add-collabs">
|
||||
diff --git a/services/web/frontend/js/features/share-project-modal/components/edit-member.tsx b/services/web/frontend/js/features/share-project-modal/components/edit-member.tsx
|
||||
index 6d806968b12..9f24cddc4ad 100644
|
||||
--- a/services/web/frontend/js/features/share-project-modal/components/edit-member.tsx
|
||||
+++ b/services/web/frontend/js/features/share-project-modal/components/edit-member.tsx
|
||||
@@ -244,14 +244,22 @@ function SelectPrivilege({
|
||||
const { features } = useProjectContext()
|
||||
|
||||
const privileges = useMemo(
|
||||
- (): Privilege[] => [
|
||||
- { key: 'owner', label: t('make_owner') },
|
||||
- { key: 'readAndWrite', label: t('editor') },
|
||||
- { key: 'review', label: t('reviewer') },
|
||||
- { key: 'readOnly', label: t('viewer') },
|
||||
- { key: 'removeAccess', label: t('remove_access') },
|
||||
- ],
|
||||
- [t]
|
||||
+ (): Privilege[] =>
|
||||
+ features.trackChangesVisible
|
||||
+ ? [
|
||||
+ { key: 'owner', label: t('make_owner') },
|
||||
+ { key: 'readAndWrite', label: t('editor') },
|
||||
+ { key: 'review', label: t('reviewer') },
|
||||
+ { key: 'readOnly', label: t('viewer') },
|
||||
+ { key: 'removeAccess', label: t('remove_access') },
|
||||
+ ]
|
||||
+ : [
|
||||
+ { key: 'owner', label: t('make_owner') },
|
||||
+ { key: 'readAndWrite', label: t('editor') },
|
||||
+ { key: 'readOnly', label: t('viewer') },
|
||||
+ { key: 'removeAccess', label: t('remove_access') },
|
||||
+ ],
|
||||
+ [features.trackChangesVisible, t]
|
||||
)
|
||||
|
||||
const downgradedPseudoPrivilege: Privilege = {
|
||||
diff --git a/services/web/frontend/js/features/source-editor/components/codemirror-editor.tsx b/services/web/frontend/js/features/source-editor/components/codemirror-editor.tsx
|
||||
index c1808cbb301..4bdfe2682c8 100644
|
||||
--- a/services/web/frontend/js/features/source-editor/components/codemirror-editor.tsx
|
||||
+++ b/services/web/frontend/js/features/source-editor/components/codemirror-editor.tsx
|
||||
@@ -18,6 +18,7 @@ import {
|
||||
} from './codemirror-context'
|
||||
import MathPreviewTooltip from './math-preview-tooltip'
|
||||
import { useToolbarMenuBarEditorCommands } from '@/features/ide-redesign/hooks/use-toolbar-menu-editor-commands'
|
||||
+import { useProjectContext } from '@/shared/context/project-context'
|
||||
|
||||
// TODO: remove this when definitely no longer used
|
||||
export * from './codemirror-context'
|
||||
@@ -67,6 +68,7 @@ function CodeMirrorEditor() {
|
||||
|
||||
function CodeMirrorEditorComponents() {
|
||||
useToolbarMenuBarEditorCommands()
|
||||
+ const { features } = useProjectContext()
|
||||
|
||||
return (
|
||||
<ReviewPanelProviders>
|
||||
@@ -83,8 +85,8 @@ function CodeMirrorEditorComponents() {
|
||||
<CodeMirrorCommandTooltip />
|
||||
|
||||
<MathPreviewTooltip />
|
||||
- <ReviewTooltipMenu />
|
||||
- <ReviewPanelNew />
|
||||
+ {features.trackChangesVisible && <ReviewTooltipMenu />}
|
||||
+ {features.trackChangesVisible && <ReviewPanelNew />}
|
||||
|
||||
{sourceEditorComponents.map(
|
||||
({ import: { default: Component }, path }) => (
|
||||
diff --git a/services/web/frontend/js/features/source-editor/components/toolbar/toolbar-items.tsx b/services/web/frontend/js/features/source-editor/components/toolbar/toolbar-items.tsx
|
||||
index e70663683fc..c5d9f3d3e47 100644
|
||||
--- a/services/web/frontend/js/features/source-editor/components/toolbar/toolbar-items.tsx
|
||||
+++ b/services/web/frontend/js/features/source-editor/components/toolbar/toolbar-items.tsx
|
||||
@@ -14,6 +14,7 @@ import { LegacyTableDropdown } from './table-inserter-dropdown-legacy'
|
||||
import { withinFormattingCommand } from '@/features/source-editor/utils/tree-operations/formatting'
|
||||
import { isSplitTestEnabled } from '@/utils/splitTestUtils'
|
||||
import { isMac } from '@/shared/utils/os'
|
||||
+import { useProjectContext } from '@/shared/context/project-context'
|
||||
|
||||
export const ToolbarItems: FC<{
|
||||
state: EditorState
|
||||
@@ -31,6 +32,7 @@ export const ToolbarItems: FC<{
|
||||
const { t } = useTranslation()
|
||||
const { toggleSymbolPalette, showSymbolPalette, writefullInstance } =
|
||||
useEditorContext()
|
||||
+ const { features } = useProjectContext()
|
||||
const isActive = withinFormattingCommand(state)
|
||||
|
||||
const symbolPaletteAvailable = getMeta('ol-symbolPaletteAvailable')
|
||||
@@ -127,13 +129,15 @@ export const ToolbarItems: FC<{
|
||||
command={commands.wrapInHref}
|
||||
icon="add_link"
|
||||
/>
|
||||
- <ToolbarButton
|
||||
- id="toolbar-add-comment"
|
||||
- label={t('add_comment')}
|
||||
- disabled={state.selection.main.empty}
|
||||
- command={commands.addComment}
|
||||
- icon="add_comment"
|
||||
- />
|
||||
+ {features.trackChangesVisible && (
|
||||
+ <ToolbarButton
|
||||
+ id="toolbar-add-comment"
|
||||
+ label={t('add_comment')}
|
||||
+ disabled={state.selection.main.empty}
|
||||
+ command={commands.addComment}
|
||||
+ icon="add_comment"
|
||||
+ />
|
||||
+ )}
|
||||
<ToolbarButton
|
||||
id="toolbar-ref"
|
||||
label={t('toolbar_insert_cross_reference')}
|
||||
diff --git a/services/web/test/frontend/features/review-panel/review-panel.spec.tsx b/services/web/test/frontend/features/review-panel/review-panel.spec.tsx
|
||||
index d6677878108..58ac3e443da 100644
|
||||
--- a/services/web/test/frontend/features/review-panel/review-panel.spec.tsx
|
||||
+++ b/services/web/test/frontend/features/review-panel/review-panel.spec.tsx
|
||||
@@ -181,6 +181,7 @@ describe('<ReviewPanel />', function () {
|
||||
removeChangeIds,
|
||||
},
|
||||
},
|
||||
+ projectFeatures: { trackChangesVisible: true },
|
||||
})
|
||||
|
||||
cy.wrap(scope).as('scope')
|
||||
@@ -626,7 +627,7 @@ describe('<ReviewPanel /> for free users', function () {
|
||||
function mountEditor(ownerId = USER_ID) {
|
||||
const scope = mockScope(undefined, {
|
||||
permissions: { write: true, trackedWrite: false, comment: true },
|
||||
- projectFeatures: { trackChanges: false },
|
||||
+ projectFeatures: { trackChanges: false, trackChangesVisible: true },
|
||||
projectOwner: {
|
||||
_id: ownerId,
|
||||
},
|
||||
diff --git a/services/web/test/frontend/features/share-project-modal/components/share-project-modal.test.jsx b/services/web/test/frontend/features/share-project-modal/components/share-project-modal.test.jsx
|
||||
index b86207fb0f7..dfce8134d1c 100644
|
||||
--- a/services/web/test/frontend/features/share-project-modal/components/share-project-modal.test.jsx
|
||||
+++ b/services/web/test/frontend/features/share-project-modal/components/share-project-modal.test.jsx
|
||||
@@ -694,6 +694,7 @@ describe('<ShareProjectModal/>', function () {
|
||||
features: {
|
||||
collaborators: 0,
|
||||
compileGroup: 'standard',
|
||||
+ trackChangesVisible: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -723,6 +724,7 @@ describe('<ShareProjectModal/>', function () {
|
||||
...project,
|
||||
features: {
|
||||
collaborators: 1,
|
||||
+ trackChangesVisible: true,
|
||||
},
|
||||
members: [
|
||||
{
|
|
@ -1,86 +0,0 @@
|
|||
diff --git a/services/clsi/app/js/LocalCommandRunner.js b/services/clsi/app/js/LocalCommandRunner.js
|
||||
index ce274733585..aa62825443c 100644
|
||||
--- a/services/clsi/app/js/LocalCommandRunner.js
|
||||
+++ b/services/clsi/app/js/LocalCommandRunner.js
|
||||
@@ -54,6 +54,7 @@ module.exports = CommandRunner = {
|
||||
cwd: directory,
|
||||
env,
|
||||
stdio: ['pipe', 'pipe', 'ignore'],
|
||||
+ detached: true,
|
||||
})
|
||||
|
||||
let stdout = ''
|
||||
diff --git a/services/clsi/test/acceptance/js/StopCompile.js b/services/clsi/test/acceptance/js/StopCompile.js
|
||||
new file mode 100644
|
||||
index 00000000000..103a70f37d7
|
||||
--- /dev/null
|
||||
+++ b/services/clsi/test/acceptance/js/StopCompile.js
|
||||
@@ -0,0 +1,47 @@
|
||||
+const Client = require('./helpers/Client')
|
||||
+const ClsiApp = require('./helpers/ClsiApp')
|
||||
+const { expect } = require('chai')
|
||||
+
|
||||
+describe('Stop compile', function () {
|
||||
+ before(function (done) {
|
||||
+ this.request = {
|
||||
+ options: {
|
||||
+ timeout: 100,
|
||||
+ }, // seconds
|
||||
+ resources: [
|
||||
+ {
|
||||
+ path: 'main.tex',
|
||||
+ content: `\
|
||||
+\\documentclass{article}
|
||||
+\\begin{document}
|
||||
+\\def\\x{Hello!\\par\\x}
|
||||
+\\x
|
||||
+\\end{document}\
|
||||
+`,
|
||||
+ },
|
||||
+ ],
|
||||
+ }
|
||||
+ this.project_id = Client.randomId()
|
||||
+ ClsiApp.ensureRunning(() => {
|
||||
+ // start the compile in the background
|
||||
+ Client.compile(this.project_id, this.request, (error, res, body) => {
|
||||
+ this.compileResult = { error, res, body }
|
||||
+ })
|
||||
+ // wait for 1 second before stopping the compile
|
||||
+ setTimeout(() => {
|
||||
+ Client.stopCompile(this.project_id, (error, res, body) => {
|
||||
+ this.stopResult = { error, res, body }
|
||||
+ setTimeout(done, 1000) // allow time for the compile request to terminate
|
||||
+ })
|
||||
+ }, 1000)
|
||||
+ })
|
||||
+ })
|
||||
+
|
||||
+ it('should force a compile response with an error status', function () {
|
||||
+ expect(this.stopResult.error).to.be.null
|
||||
+ expect(this.stopResult.res.statusCode).to.equal(204)
|
||||
+ expect(this.compileResult.res.statusCode).to.equal(200)
|
||||
+ expect(this.compileResult.body.compile.status).to.equal('terminated')
|
||||
+ expect(this.compileResult.body.compile.error).to.equal('terminated')
|
||||
+ })
|
||||
+})
|
||||
diff --git a/services/clsi/test/acceptance/js/helpers/Client.js b/services/clsi/test/acceptance/js/helpers/Client.js
|
||||
index a0bdce734f3..49bf7390c6f 100644
|
||||
--- a/services/clsi/test/acceptance/js/helpers/Client.js
|
||||
+++ b/services/clsi/test/acceptance/js/helpers/Client.js
|
||||
@@ -42,6 +42,16 @@ module.exports = Client = {
|
||||
)
|
||||
},
|
||||
|
||||
+ stopCompile(projectId, callback) {
|
||||
+ if (callback == null) {
|
||||
+ callback = function () {}
|
||||
+ }
|
||||
+ return request.post(
|
||||
+ { url: `${this.host}/project/${projectId}/compile/stop` },
|
||||
+ callback
|
||||
+ )
|
||||
+ },
|
||||
+
|
||||
clearCache(projectId, callback) {
|
||||
if (callback == null) {
|
||||
callback = function () {}
|
|
@ -1,172 +0,0 @@
|
|||
diff --git a/services/web/frontend/js/features/project-list/components/project-list-ds-nav.tsx b/services/web/frontend/js/features/project-list/components/project-list-ds-nav.tsx
|
||||
index 8f3b3a8e5d0..f8c8014e1c0 100644
|
||||
--- a/services/web/frontend/js/features/project-list/components/project-list-ds-nav.tsx
|
||||
+++ b/services/web/frontend/js/features/project-list/components/project-list-ds-nav.tsx
|
||||
@@ -55,7 +55,11 @@ export function ProjectListDsNav() {
|
||||
|
||||
return (
|
||||
<div className="project-ds-nav-page website-redesign">
|
||||
- <DefaultNavbar {...navbarProps} customLogo={overleafLogo} showCloseIcon />
|
||||
+ <DefaultNavbar
|
||||
+ {...navbarProps}
|
||||
+ overleafLogo={overleafLogo}
|
||||
+ showCloseIcon
|
||||
+ />
|
||||
<main className="project-list-wrapper">
|
||||
<SidebarDsNav />
|
||||
<div className="project-ds-nav-content-and-messages">
|
||||
diff --git a/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/default-navbar.tsx b/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/default-navbar.tsx
|
||||
index 2480b7f061f..8e5429dbde6 100644
|
||||
--- a/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/default-navbar.tsx
|
||||
+++ b/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/default-navbar.tsx
|
||||
@@ -1,4 +1,4 @@
|
||||
-import { useState } from 'react'
|
||||
+import React, { useState } from 'react'
|
||||
import { sendMB } from '@/infrastructure/event-tracking'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { Button, Container, Nav, Navbar } from 'react-bootstrap'
|
||||
@@ -13,9 +13,15 @@ import MaterialIcon from '@/shared/components/material-icon'
|
||||
import { useContactUsModal } from '@/shared/hooks/use-contact-us-modal'
|
||||
import { UserProvider } from '@/shared/context/user-context'
|
||||
import { X } from '@phosphor-icons/react'
|
||||
+import overleafWhiteLogo from '@/shared/svgs/overleaf-white.svg'
|
||||
+import overleafBlackLogo from '@/shared/svgs/overleaf-black.svg'
|
||||
+import type { CSSPropertiesWithVariables } from '../../../../../../../types/css-properties-with-variables'
|
||||
|
||||
-function DefaultNavbar(props: DefaultNavbarMetadata) {
|
||||
+function DefaultNavbar(
|
||||
+ props: DefaultNavbarMetadata & { overleafLogo?: string }
|
||||
+) {
|
||||
const {
|
||||
+ overleafLogo,
|
||||
customLogo,
|
||||
title,
|
||||
canDisplayAdminMenu,
|
||||
@@ -49,10 +55,20 @@ function DefaultNavbar(props: DefaultNavbarMetadata) {
|
||||
className="navbar-default navbar-main"
|
||||
expand="lg"
|
||||
onToggle={expanded => setExpanded(expanded)}
|
||||
+ style={
|
||||
+ {
|
||||
+ '--navbar-brand-image-default-url': `url("${overleafWhiteLogo}")`,
|
||||
+ '--navbar-brand-image-redesign-url': `url("${overleafBlackLogo}")`,
|
||||
+ } as CSSPropertiesWithVariables
|
||||
+ }
|
||||
>
|
||||
<Container className="navbar-container" fluid>
|
||||
<div className="navbar-header">
|
||||
- <HeaderLogoOrTitle title={title} customLogo={customLogo} />
|
||||
+ <HeaderLogoOrTitle
|
||||
+ title={title}
|
||||
+ overleafLogo={overleafLogo}
|
||||
+ customLogo={customLogo}
|
||||
+ />
|
||||
{enableUpgradeButton ? (
|
||||
<Button
|
||||
as="a"
|
||||
diff --git a/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/header-logo-or-title.tsx b/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/header-logo-or-title.tsx
|
||||
index 44500f1b826..3eefc8e2d1c 100644
|
||||
--- a/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/header-logo-or-title.tsx
|
||||
+++ b/services/web/frontend/js/features/ui/components/bootstrap-5/navbar/header-logo-or-title.tsx
|
||||
@@ -2,11 +2,13 @@ import type { DefaultNavbarMetadata } from '@/features/ui/components/types/defau
|
||||
import getMeta from '@/utils/meta'
|
||||
|
||||
export default function HeaderLogoOrTitle({
|
||||
+ overleafLogo,
|
||||
customLogo,
|
||||
title,
|
||||
-}: Pick<DefaultNavbarMetadata, 'customLogo' | 'title'>) {
|
||||
+}: Pick<DefaultNavbarMetadata, 'customLogo' | 'title'> & {
|
||||
+ overleafLogo?: string
|
||||
+}) {
|
||||
const { appName } = getMeta('ol-ExposedSettings')
|
||||
-
|
||||
if (customLogo) {
|
||||
return (
|
||||
// eslint-disable-next-line jsx-a11y/anchor-has-content
|
||||
@@ -24,9 +26,16 @@ export default function HeaderLogoOrTitle({
|
||||
</a>
|
||||
)
|
||||
} else {
|
||||
+ const style = overleafLogo
|
||||
+ ? {
|
||||
+ style: {
|
||||
+ backgroundImage: `url("${overleafLogo}")`,
|
||||
+ },
|
||||
+ }
|
||||
+ : null
|
||||
return (
|
||||
// eslint-disable-next-line jsx-a11y/anchor-has-content
|
||||
- <a href="/" aria-label={appName} className="navbar-brand" />
|
||||
+ <a href="/" aria-label={appName} className="navbar-brand" {...style} />
|
||||
)
|
||||
}
|
||||
}
|
||||
diff --git a/services/web/frontend/js/shared/svgs/overleaf-black.svg b/services/web/frontend/js/shared/svgs/overleaf-black.svg
|
||||
new file mode 100644
|
||||
index 00000000000..ea0678438ba
|
||||
--- /dev/null
|
||||
+++ b/services/web/frontend/js/shared/svgs/overleaf-black.svg
|
||||
@@ -0,0 +1,9 @@
|
||||
+<svg width="129" height="38" viewBox="0 0 129 38" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
+<mask id="mask0_2579_355" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="0" y="0" width="72" height="38">
|
||||
+<path d="M71.7643 37.6327H0.0244141V0.0717773H71.7643V37.6327Z" fill="white"/>
|
||||
+</mask>
|
||||
+<g mask="url(#mask0_2579_355)">
|
||||
+<path d="M47.2509 26.4555C47.3948 27.7507 47.8985 28.7821 48.81 29.5257C49.6974 30.2692 50.8487 30.653 52.2638 30.653C53.1993 30.653 54.0387 30.4611 54.7823 30.0773C55.5258 29.6696 56.1255 29.1419 56.5572 28.4223H61.0664C60.2989 30.3891 59.1716 31.9002 57.6365 33.0035C56.1255 34.0829 54.3506 34.6345 52.3598 34.6345C51.0166 34.6345 49.7934 34.3947 48.666 33.915C47.5387 33.4352 46.5314 32.7397 45.6199 31.8043C44.7804 30.9168 44.1089 29.9094 43.6531 28.7341C43.1974 27.5589 42.9576 26.3836 42.9576 25.1603C42.9576 23.9131 43.1734 22.7138 43.6052 21.6105C44.0369 20.5072 44.6605 19.4998 45.5 18.6124C46.4114 17.629 47.4668 16.8854 48.6181 16.3817C49.7694 15.8541 50.9686 15.5902 52.1919 15.5902C53.7509 15.5902 55.214 15.95 56.5572 16.6456C57.9004 17.3651 59.0517 18.3485 60.0111 19.6437C60.5867 20.4113 61.0185 21.2747 61.3063 22.2581C61.5941 23.2175 61.714 24.3209 61.714 25.5681C61.714 25.664 61.714 25.8079 61.69 26.0238C61.69 26.2397 61.6661 26.3836 61.6661 26.4795H47.2509V26.4555ZM57.2048 23.1216C56.845 21.9223 56.2454 21.0109 55.4059 20.3873C54.5664 19.7637 53.4871 19.4519 52.2159 19.4519C51.0886 19.4519 50.1052 19.7876 49.2177 20.4592C48.3303 21.1308 47.7306 22.0183 47.4188 23.1216H57.2048ZM71.7638 19.7637C70.1328 19.8836 69.0055 20.3153 68.3579 21.0349C67.7103 21.7544 67.3985 23.0496 67.3985 24.9205V34.1068H63.2011V16.1179H67.1347V18.2046C67.7583 17.3891 68.4539 16.8135 69.2214 16.4297C69.9649 16.0459 70.8284 15.8541 71.7638 15.8541V19.7637ZM32.428 1.24705C27.3432 -0.743722 8.9465 -1.46328 8.92251 9.52196C3.54982 12.9519 0 18.5404 0 24.5367C0 31.7803 5.87638 37.6567 13.1199 37.6567C20.3635 37.6567 26.2399 31.7803 26.2399 24.5367C26.2399 18.9482 22.738 14.1511 17.797 12.2803C16.8376 11.9205 14.7749 11.2729 13.1439 11.4168C10.7934 12.9039 7.91513 15.974 6.57196 19.0441C8.58672 16.6216 11.7288 15.5662 14.5351 16.022C18.6365 16.6936 21.7786 20.2434 21.7786 24.5607C21.7786 29.3338 17.917 33.1954 13.1439 33.1954C10.5055 33.1954 8.15498 32.0201 6.57196 30.1733C4.19742 27.415 3.59779 24.4408 4.07749 21.5386C5.73247 11.3688 17.797 5.58838 26.7675 3.35775C23.8413 4.9168 18.5646 7.45923 14.8708 10.2175C25.6402 14.391 27.3911 5.30056 32.428 1.24705ZM36.7934 34.1308H33.5074L26.6716 16.1179H31.1328L35.3303 28.0865L39.6476 16.1179H43.9889L36.7934 34.1308Z" fill="#1B222C"/>
|
||||
+</g>
|
||||
+<path d="M83.6127 26.4556C83.7567 27.7508 84.2843 28.7822 85.1718 29.5257C86.0592 30.2692 87.2105 30.653 88.6257 30.653C89.5611 30.653 90.4006 30.4611 91.1441 30.0774C91.8877 29.6696 92.4873 29.1419 92.919 28.4224H97.4282C96.6607 30.3892 95.5334 31.9002 93.9984 33.0036C92.4873 34.0829 90.7124 34.6346 88.7216 34.6346C87.3784 34.6346 86.1552 34.3947 85.0279 33.915C83.9006 33.4353 82.8932 32.7397 81.9817 31.8043C81.1423 30.9168 80.4707 29.9095 80.015 28.7342C79.5353 27.5829 79.3194 26.3836 79.3194 25.1604C79.3194 23.9131 79.5353 22.7139 79.967 21.6106C80.3987 20.5072 81.0223 19.4999 81.8618 18.6124C82.7733 17.629 83.8286 16.8855 84.9799 16.3818C86.1312 15.8541 87.3305 15.5903 88.5537 15.5903C90.1128 15.5903 91.5758 15.95 92.919 16.6456C94.2622 17.3652 95.4135 18.3486 96.3729 19.6438C96.9485 20.4113 97.3803 21.2748 97.6681 22.2582C97.9559 23.2176 98.0758 24.3209 98.0758 25.5681C98.0758 25.6641 98.0758 25.808 98.0519 26.0238C98.0519 26.2397 98.0279 26.3836 98.0279 26.4796H83.6127V26.4556ZM93.5426 23.1216C93.1829 21.9224 92.5832 21.0109 91.7437 20.3873C90.9043 19.7637 89.8249 19.4519 88.5537 19.4519C87.4264 19.4519 86.443 19.7877 85.5556 20.4593C84.6681 21.1309 84.0685 22.0183 83.7567 23.1216H93.5426ZM114.698 34.1309V31.9242C114.194 32.8117 113.498 33.4833 112.587 33.915C111.675 34.3467 110.5 34.5626 109.085 34.5626C106.423 34.5626 104.192 33.6512 102.417 31.8283C100.642 30.0054 99.7308 27.7508 99.7308 25.0644C99.7308 23.7932 99.9467 22.594 100.402 21.4667C100.858 20.3393 101.482 19.332 102.321 18.4685C103.209 17.5091 104.216 16.8135 105.295 16.3578C106.375 15.9021 107.622 15.6862 108.989 15.6862C110.308 15.6862 111.436 15.9021 112.371 16.3338C113.306 16.7655 114.074 17.4371 114.65 18.3246V16.1419H118.727V34.1548H114.698V34.1309ZM104.024 24.9685C104.024 26.4796 104.528 27.7508 105.535 28.7822C106.543 29.8135 107.766 30.3172 109.229 30.3172C110.548 30.3172 111.699 29.8135 112.707 28.7822C113.714 27.7508 114.218 26.5515 114.218 25.1844C114.218 23.7213 113.714 22.474 112.707 21.4187C111.699 20.3633 110.524 19.8357 109.157 19.8357C107.742 19.8357 106.543 20.3393 105.535 21.3227C104.528 22.3301 104.024 23.5294 104.024 24.9685ZM129.904 16.1179V19.8596H126.882V34.1309H122.829V19.8596H120.694V16.1179H122.709V15.6382C122.709 13.7434 123.236 12.3283 124.268 11.3929C125.323 10.4574 126.906 10.0017 129.041 10.0017C129.113 10.0017 129.257 10.0017 129.449 10.0257C129.64 10.0257 129.784 10.0497 129.904 10.0497V13.8154H129.616C128.657 13.8154 127.985 13.9833 127.578 14.2711C127.17 14.5829 126.954 15.0866 126.954 15.8301V16.1659H129.904V16.1179ZM73.5869 34.1309H77.6884V10.2895H73.5869V34.1309Z" fill="#1B222C"/>
|
||||
+</svg>
|
||||
diff --git a/services/web/frontend/js/shared/svgs/overleaf-white.svg b/services/web/frontend/js/shared/svgs/overleaf-white.svg
|
||||
new file mode 100644
|
||||
index 00000000000..2ced81aa46d
|
||||
--- /dev/null
|
||||
+++ b/services/web/frontend/js/shared/svgs/overleaf-white.svg
|
||||
@@ -0,0 +1 @@
|
||||
+<svg id="Layer_1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 542 157" enable-background="new 0 0 542 157"><style>.st0{filter:url(#Adobe_OpacityMaskFilter);} .st1{fill:#FFFFFF;} .st2{mask:url(#mask-2);fill:#FFFFFF;}</style><g id="Page-1"><g id="Overleaf"><g id="Group-3"><defs><filter id="Adobe_OpacityMaskFilter" filterUnits="userSpaceOnUse" x="0" y=".3" width="299.2" height="156.7"><feColorMatrix values="1 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 1 0"/></filter></defs><mask maskUnits="userSpaceOnUse" x="0" y=".3" width="299.2" height="156.7" id="mask-2"><g class="st0"><path id="path-1" class="st1" d="M299.2 156.9H.1V.3h299.1z"/></g></mask><path id="Fill-1" class="st2" d="M197 110.3c.6 5.4 2.7 9.7 6.5 12.8 3.7 3.1 8.5 4.7 14.4 4.7 3.9 0 7.4-.8 10.5-2.4 3.1-1.7 5.6-3.9 7.4-6.9h18.8c-3.2 8.2-7.9 14.5-14.3 19.1-6.3 4.5-13.7 6.8-22 6.8-5.6 0-10.7-1-15.4-3-4.7-2-8.9-4.9-12.7-8.8-3.5-3.7-6.3-7.9-8.2-12.8s-2.9-9.8-2.9-14.9c0-5.2.9-10.2 2.7-14.8 1.8-4.6 4.4-8.8 7.9-12.5 3.8-4.1 8.2-7.2 13-9.3 4.8-2.2 9.8-3.3 14.9-3.3 6.5 0 12.6 1.5 18.2 4.4 5.6 3 10.4 7.1 14.4 12.5 2.4 3.2 4.2 6.8 5.4 10.9 1.2 4 1.7 8.6 1.7 13.8 0 .4 0 1-.1 1.9 0 .9-.1 1.5-.1 1.9H197v-.1zm41.5-13.9c-1.5-5-4-8.8-7.5-11.4-3.5-2.6-8-3.9-13.3-3.9-4.7 0-8.8 1.4-12.5 4.2-3.7 2.8-6.2 6.5-7.5 11.1h40.8zm60.7-14c-6.8.5-11.5 2.3-14.2 5.3-2.7 3-4 8.4-4 16.2v38.3h-17.5v-75h16.4v8.7c2.6-3.4 5.5-5.8 8.7-7.4 3.1-1.6 6.7-2.4 10.6-2.4v16.3zm-164-77.2C114-3.1 37.3-6.1 37.2 39.7 14.8 54 0 77.3 0 102.3 0 132.5 24.5 157 54.7 157c30.2 0 54.7-24.5 54.7-54.7 0-23.3-14.6-43.3-35.2-51.1-4-1.5-12.6-4.2-19.4-3.6-9.8 6.2-21.8 19-27.4 31.8 8.4-10.1 21.5-14.5 33.2-12.6 17.1 2.8 30.2 17.6 30.2 35.6 0 19.9-16.1 36-36 36-11 0-20.8-4.9-27.4-12.6-9.9-11.5-12.4-23.9-10.4-36 6.9-42.4 57.2-66.5 94.6-75.8C99.4 20.5 77.4 31.1 62 42.6c44.9 17.4 52.2-20.5 73.2-37.4zm18.2 137.1h-13.7l-28.5-75.1h18.6l17.5 49.9 18-49.9h18.1l-30 75.1z"/></g><path id="Fill-4" class="st1" d="M348.6 110.3c.6 5.4 2.8 9.7 6.5 12.8 3.7 3.1 8.5 4.7 14.4 4.7 3.9 0 7.4-.8 10.5-2.4 3.1-1.7 5.6-3.9 7.4-6.9h18.8c-3.2 8.2-7.9 14.5-14.3 19.1-6.3 4.5-13.7 6.8-22 6.8-5.6 0-10.7-1-15.4-3-4.7-2-8.9-4.9-12.7-8.8-3.5-3.7-6.3-7.9-8.2-12.8-2-4.8-2.9-9.8-2.9-14.9 0-5.2.9-10.2 2.7-14.8 1.8-4.6 4.4-8.8 7.9-12.5 3.8-4.1 8.2-7.2 13-9.3 4.8-2.2 9.8-3.3 14.9-3.3 6.5 0 12.6 1.5 18.2 4.4 5.6 3 10.4 7.1 14.4 12.5 2.4 3.2 4.2 6.8 5.4 10.9 1.2 4 1.7 8.6 1.7 13.8 0 .4 0 1-.1 1.9 0 .9-.1 1.5-.1 1.9h-60.1v-.1zM390 96.4c-1.5-5-4-8.8-7.5-11.4-3.5-2.6-8-3.9-13.3-3.9-4.7 0-8.8 1.4-12.5 4.2-3.7 2.8-6.2 6.5-7.5 11.1H390zm88.2 45.9v-9.2c-2.1 3.7-5 6.5-8.8 8.3-3.8 1.8-8.7 2.7-14.6 2.7-11.1 0-20.4-3.8-27.8-11.4-7.4-7.6-11.2-17-11.2-28.2 0-5.3.9-10.3 2.8-15 1.9-4.7 4.5-8.9 8-12.5 3.7-4 7.9-6.9 12.4-8.8s9.7-2.8 15.4-2.8c5.5 0 10.2.9 14.1 2.7 3.9 1.8 7.1 4.6 9.5 8.3v-9.1h17v75.1h-16.8v-.1zm-44.5-38.2c0 6.3 2.1 11.6 6.3 15.9 4.2 4.3 9.3 6.4 15.4 6.4 5.5 0 10.3-2.1 14.5-6.4 4.2-4.3 6.3-9.3 6.3-15 0-6.1-2.1-11.3-6.3-15.7-4.2-4.4-9.1-6.6-14.8-6.6-5.9 0-10.9 2.1-15.1 6.2-4.2 4.2-6.3 9.2-6.3 15.2zm107.9-36.9v15.6H529v59.5h-16.9V82.8h-8.9V67.2h8.4v-2c0-7.9 2.2-13.8 6.5-17.7 4.4-3.9 11-5.8 19.9-5.8.3 0 .9 0 1.7.1.8 0 1.4.1 1.9.1v15.7h-1.2c-4 0-6.8.7-8.5 1.9-1.7 1.3-2.6 3.4-2.6 6.5v1.4h12.3v-.2zm-234.8 75.1h17.1V42.9h-17.1v99.4z"/></g></g></svg>
|
||||
\ No newline at end of file
|
||||
diff --git a/services/web/frontend/stylesheets/bootstrap-5/components/nav.scss b/services/web/frontend/stylesheets/bootstrap-5/components/nav.scss
|
||||
index 5d28341cf53..dd0600ed15d 100644
|
||||
--- a/services/web/frontend/stylesheets/bootstrap-5/components/nav.scss
|
||||
+++ b/services/web/frontend/stylesheets/bootstrap-5/components/nav.scss
|
||||
@@ -8,7 +8,10 @@
|
||||
--navbar-padding-h: var(--spacing-05);
|
||||
--navbar-padding: 0 var(--navbar-padding-h);
|
||||
--navbar-brand-width: 130px;
|
||||
- --navbar-brand-image-url: url('../../../../public/img/ol-brand/overleaf-white.svg');
|
||||
+ --navbar-brand-image-url: var(
|
||||
+ --navbar-brand-image-default-url,
|
||||
+ url('../../../../public/img/ol-brand/overleaf-white.svg')
|
||||
+ );
|
||||
|
||||
// Title, when used instead of a logo
|
||||
--navbar-title-font-size: var(--font-size-05);
|
||||
diff --git a/services/web/frontend/stylesheets/bootstrap-5/components/navbar.scss b/services/web/frontend/stylesheets/bootstrap-5/components/navbar.scss
|
||||
index 3b984bb6f36..a8855ea1ca3 100644
|
||||
--- a/services/web/frontend/stylesheets/bootstrap-5/components/navbar.scss
|
||||
+++ b/services/web/frontend/stylesheets/bootstrap-5/components/navbar.scss
|
||||
@@ -216,7 +216,10 @@
|
||||
.website-redesign .navbar-default {
|
||||
--navbar-title-color: var(--content-primary);
|
||||
--navbar-title-color-hover: var(--content-secondary);
|
||||
- --navbar-brand-image-url: url('../../../../public/img/ol-brand/overleaf-black.svg');
|
||||
+ --navbar-brand-image-url: var(
|
||||
+ --navbar-brand-image-redesign-url,
|
||||
+ url('../../../../public/img/ol-brand/overleaf-black.svg')
|
||||
+ );
|
||||
--navbar-subdued-color: var(--content-primary);
|
||||
--navbar-subdued-hover-bg: var(--bg-dark-primary);
|
||||
--navbar-subdued-hover-color: var(--content-primary-dark);
|
||||
diff --git a/services/web/types/css-properties-with-variables.tsx b/services/web/types/css-properties-with-variables.tsx
|
||||
new file mode 100644
|
||||
index 00000000000..fe0e85902a6
|
||||
--- /dev/null
|
||||
+++ b/services/web/types/css-properties-with-variables.tsx
|
||||
@@ -0,0 +1,4 @@
|
||||
+import { CSSProperties } from 'react'
|
||||
+
|
||||
+export type CSSPropertiesWithVariables = CSSProperties &
|
||||
+ Record<`--${string}`, number | string>
|
||||
--
|
||||
2.43.0
|
||||
|
|
@ -1,58 +0,0 @@
|
|||
diff --git a/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs b/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
|
||||
index 29f5e7ffd26..46be91a1d9c 100644
|
||||
--- a/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
|
||||
+++ b/services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs
|
||||
@@ -9,6 +9,34 @@ const { ObjectId } = mongodb
|
||||
const MIN_MONGO_VERSION = [6, 0]
|
||||
const MIN_MONGO_FEATURE_COMPATIBILITY_VERSION = [6, 0]
|
||||
|
||||
+// Allow ignoring admin check failures via an environment variable
|
||||
+const OVERRIDE_ENV_VAR_NAME = 'ALLOW_MONGO_ADMIN_CHECK_FAILURES'
|
||||
+
|
||||
+function shouldSkipAdminChecks() {
|
||||
+ return process.env[OVERRIDE_ENV_VAR_NAME] === 'true'
|
||||
+}
|
||||
+
|
||||
+function handleUnauthorizedError(err, feature) {
|
||||
+ if (
|
||||
+ err instanceof mongodb.MongoServerError &&
|
||||
+ err.codeName === 'Unauthorized'
|
||||
+ ) {
|
||||
+ console.warn(`Warning: failed to check ${feature} (not authorised)`)
|
||||
+ if (!shouldSkipAdminChecks()) {
|
||||
+ console.error(
|
||||
+ `Please ensure the MongoDB user has the required admin permissions, or\n` +
|
||||
+ `set the environment variable ${OVERRIDE_ENV_VAR_NAME}=true to ignore this check.`
|
||||
+ )
|
||||
+ process.exit(1)
|
||||
+ }
|
||||
+ console.warn(
|
||||
+ `Ignoring ${feature} check failure (${OVERRIDE_ENV_VAR_NAME}=${process.env[OVERRIDE_ENV_VAR_NAME]})`
|
||||
+ )
|
||||
+ } else {
|
||||
+ throw err
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
async function main() {
|
||||
let mongoClient
|
||||
try {
|
||||
@@ -18,8 +46,16 @@ async function main() {
|
||||
throw err
|
||||
}
|
||||
|
||||
- await checkMongoVersion(mongoClient)
|
||||
- await checkFeatureCompatibilityVersion(mongoClient)
|
||||
+ try {
|
||||
+ await checkMongoVersion(mongoClient)
|
||||
+ } catch (err) {
|
||||
+ handleUnauthorizedError(err, 'MongoDB version')
|
||||
+ }
|
||||
+ try {
|
||||
+ await checkFeatureCompatibilityVersion(mongoClient)
|
||||
+ } catch (err) {
|
||||
+ handleUnauthorizedError(err, 'MongoDB feature compatibility version')
|
||||
+ }
|
||||
|
||||
try {
|
||||
await testTransactions(mongoClient)
|
|
@ -1,4 +1,4 @@
|
|||
FROM node:22.17.0
|
||||
FROM node:22.15.1
|
||||
RUN curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - \
|
||||
&& echo \
|
||||
"deb [arch=$(dpkg --print-architecture)] https://download.docker.com/linux/debian $(. /etc/os-release && echo "$VERSION_CODENAME") stable" \
|
||||
|
|
|
@ -6,8 +6,8 @@ all: test-e2e
|
|||
# Notable the container labels com.docker.compose.project.working_dir and com.docker.compose.project.config_files need to match when creating containers from the docker host (how you started things) and from host-admin (how tests reconfigure the instance).
|
||||
export PWD = $(shell pwd)
|
||||
|
||||
export TEX_LIVE_DOCKER_IMAGE ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1
|
||||
export ALL_TEX_LIVE_DOCKER_IMAGES ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1,us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2022.1
|
||||
export TEX_LIVE_DOCKER_IMAGE ?= gcr.io/overleaf-ops/texlive-full:2023.1
|
||||
export ALL_TEX_LIVE_DOCKER_IMAGES ?= gcr.io/overleaf-ops/texlive-full:2023.1,gcr.io/overleaf-ops/texlive-full:2022.1
|
||||
export IMAGE_TAG_PRO ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro:latest
|
||||
export CYPRESS_SHARD ?=
|
||||
export COMPOSE_PROJECT_NAME ?= test
|
||||
|
@ -20,12 +20,9 @@ test-e2e-native:
|
|||
npm run cypress:open
|
||||
|
||||
test-e2e:
|
||||
docker compose build host-admin
|
||||
docker compose up -d host-admin
|
||||
docker compose up --no-log-prefix --exit-code-from=e2e e2e
|
||||
|
||||
test-e2e-open:
|
||||
docker compose up -d host-admin
|
||||
docker compose up --no-log-prefix --exit-code-from=e2e-open e2e-open
|
||||
|
||||
clean:
|
||||
|
@ -48,7 +45,7 @@ prefetch_custom_compose_pull:
|
|||
prefetch_custom: prefetch_custom_texlive
|
||||
prefetch_custom_texlive:
|
||||
echo "$$ALL_TEX_LIVE_DOCKER_IMAGES" | tr ',' '\n' | xargs -I% \
|
||||
sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/*/*/}; docker pull $$tag; docker tag $$tag $$re_tag'
|
||||
sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/*/}; docker pull $$tag; docker tag $$tag $$re_tag'
|
||||
|
||||
prefetch_custom: prefetch_old
|
||||
prefetch_old:
|
||||
|
|
|
@ -179,21 +179,6 @@ describe('admin panel', function () {
|
|||
cy.get('nav').findByText('Manage Users').click()
|
||||
})
|
||||
|
||||
it('displays expected tabs', () => {
|
||||
const tabs = ['Users', 'License Usage']
|
||||
cy.get('[role="tab"]').each((el, index) => {
|
||||
cy.wrap(el).findByText(tabs[index]).click()
|
||||
})
|
||||
cy.get('[role="tab"]').should('have.length', tabs.length)
|
||||
})
|
||||
|
||||
it('license usage tab', () => {
|
||||
cy.get('a').contains('License Usage').click()
|
||||
cy.findByText(
|
||||
'An active user is one who has opened a project in this Server Pro instance in the last 12 months.'
|
||||
)
|
||||
})
|
||||
|
||||
describe('create users', () => {
|
||||
beforeEach(() => {
|
||||
cy.get('a').contains('New User').click()
|
||||
|
|
|
@ -40,15 +40,9 @@ describe('Project creation and compilation', function () {
|
|||
cy.get('.cm-line').should('have.length', 1)
|
||||
cy.get('.cm-line').type(markdownContent)
|
||||
cy.findByText('main.tex').click()
|
||||
cy.findByRole('textbox', { name: /Source Editor editing/i }).should(
|
||||
'contain.text',
|
||||
'\\maketitle'
|
||||
)
|
||||
cy.get('.cm-content').should('contain.text', '\\maketitle')
|
||||
cy.findByText(fileName).click()
|
||||
cy.findByRole('textbox', { name: /Source Editor editing/i }).should(
|
||||
'contain.text',
|
||||
markdownContent
|
||||
)
|
||||
cy.get('.cm-content').should('contain.text', markdownContent)
|
||||
})
|
||||
|
||||
it('can link and display linked image from other project', function () {
|
||||
|
|
|
@ -35,7 +35,7 @@ services:
|
|||
MAILTRAP_PASSWORD: 'password-for-mailtrap'
|
||||
|
||||
mongo:
|
||||
-    image: mongo:8.0.11
+    image: mongo:6.0
|
||||
command: '--replSet overleaf'
|
||||
volumes:
|
||||
- ../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
@ -91,7 +91,6 @@ services:
|
|||
volumes:
|
||||
- ./:/e2e
|
||||
- /tmp/.X11-unix:/tmp/.X11-unix
|
||||
- ${XAUTHORITY:-/dev/null}:/home/node/.Xauthority
|
||||
user: "${DOCKER_USER:-1000:1000}"
|
||||
environment:
|
||||
CYPRESS_SHARD:
|
||||
|
@ -132,7 +131,7 @@ services:
|
|||
|
||||
saml:
|
||||
restart: always
|
||||
image: us-east1-docker.pkg.dev/overleaf-ops/ol-docker/saml-test
|
||||
image: gcr.io/overleaf-ops/saml-test
|
||||
environment:
|
||||
SAML_TEST_SP_ENTITY_ID: 'sharelatex-test-saml'
|
||||
SAML_BASE_URL_PATH: 'http://saml/simplesaml/'
|
||||
|
|
|
@ -2,7 +2,6 @@ import {
|
|||
createNewFile,
|
||||
createProject,
|
||||
openProjectById,
|
||||
testNewFileUpload,
|
||||
} from './helpers/project'
|
||||
import { isExcludedBySharding, startWith } from './helpers/config'
|
||||
import { ensureUserExists, login } from './helpers/login'
|
||||
|
@ -105,10 +104,7 @@ describe('editor', () => {
|
|||
force: true,
|
||||
})
|
||||
cy.get('button').contains('𝜉').click()
|
||||
cy.findByRole('textbox', { name: /Source Editor editing/i }).should(
|
||||
'contain.text',
|
||||
'\\xi'
|
||||
)
|
||||
cy.get('.cm-content').should('contain.text', '\\xi')
|
||||
|
||||
cy.log('recompile to force flush and avoid "unsaved changes" prompt')
|
||||
recompile()
|
||||
|
@ -120,7 +116,24 @@ describe('editor', () => {
|
|||
cy.get('button').contains('New file').click({ force: true })
|
||||
})
|
||||
|
||||
testNewFileUpload()
|
||||
it('can upload file', () => {
|
||||
const name = `${uuid()}.txt`
|
||||
const content = `Test File Content ${name}`
|
||||
cy.get('button').contains('Upload').click({ force: true })
|
||||
cy.get('input[type=file]')
|
||||
.first()
|
||||
.selectFile(
|
||||
{
|
||||
contents: Cypress.Buffer.from(content),
|
||||
fileName: name,
|
||||
lastModified: Date.now(),
|
||||
},
|
||||
{ force: true }
|
||||
)
|
||||
// force: The file-tree pane is too narrow to display the full name.
|
||||
cy.findByTestId('file-tree').findByText(name).click({ force: true })
|
||||
cy.findByText(content)
|
||||
})
|
||||
|
||||
it('should not display import from URL', () => {
|
||||
cy.findByText('From external URL').should('not.exist')
|
||||
|
@ -134,9 +147,8 @@ describe('editor', () => {
|
|||
|
||||
it('can download project sources', () => {
|
||||
cy.get('a').contains('Source').click()
|
||||
const zipName = projectName.replaceAll('-', '_')
|
||||
cy.task('readFileInZip', {
|
||||
pathToZip: `cypress/downloads/${zipName}.zip`,
|
||||
pathToZip: `cypress/downloads/${projectName}.zip`,
|
||||
fileToRead: 'main.tex',
|
||||
}).should('contain', 'Your introduction goes here')
|
||||
})
|
||||
|
|
|
@ -1,104 +0,0 @@
|
|||
import { ensureUserExists, login } from './helpers/login'
|
||||
import {
|
||||
createProject,
|
||||
openProjectById,
|
||||
prepareFileUploadTest,
|
||||
} from './helpers/project'
|
||||
import { isExcludedBySharding, startWith } from './helpers/config'
|
||||
import { prepareWaitForNextCompileSlot } from './helpers/compile'
|
||||
import { beforeWithReRunOnTestRetry } from './helpers/beforeWithReRunOnTestRetry'
|
||||
import { v4 as uuid } from 'uuid'
|
||||
import { purgeFilestoreData, runScript } from './helpers/hostAdminClient'
|
||||
|
||||
describe('filestore migration', function () {
|
||||
if (isExcludedBySharding('CE_CUSTOM_3')) return
|
||||
startWith({ withDataDir: true, resetData: true, vars: {} })
|
||||
ensureUserExists({ email: 'user@example.com' })
|
||||
|
||||
let projectName: string
|
||||
let projectId: string
|
||||
let waitForCompileRateLimitCoolOff: (fn: () => void) => void
|
||||
const previousBinaryFiles: (() => void)[] = []
|
||||
beforeWithReRunOnTestRetry(function () {
|
||||
projectName = `project-${uuid()}`
|
||||
login('user@example.com')
|
||||
createProject(projectName, { type: 'Example project' }).then(
|
||||
id => (projectId = id)
|
||||
)
|
||||
let queueReset
|
||||
;({ waitForCompileRateLimitCoolOff, queueReset } =
|
||||
prepareWaitForNextCompileSlot())
|
||||
queueReset()
|
||||
previousBinaryFiles.push(prepareFileUploadTest(true))
|
||||
})
|
||||
|
||||
beforeEach(() => {
|
||||
login('user@example.com')
|
||||
waitForCompileRateLimitCoolOff(() => {
|
||||
openProjectById(projectId)
|
||||
})
|
||||
})
|
||||
|
||||
function checkFilesAreAccessible() {
|
||||
it('can upload new binary file and read previous uploads', function () {
|
||||
previousBinaryFiles.push(prepareFileUploadTest(true))
|
||||
for (const check of previousBinaryFiles) {
|
||||
check()
|
||||
}
|
||||
})
|
||||
|
||||
it('renders frog jpg', () => {
|
||||
cy.findByTestId('file-tree').findByText('frog.jpg').click()
|
||||
cy.get('[alt="frog.jpg"]')
|
||||
.should('be.visible')
|
||||
.and('have.prop', 'naturalWidth')
|
||||
.should('be.greaterThan', 0)
|
||||
})
|
||||
}
|
||||
|
||||
describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL not set', function () {
|
||||
startWith({ withDataDir: true, vars: {} })
|
||||
checkFilesAreAccessible()
|
||||
})
|
||||
|
||||
describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL=0', function () {
|
||||
startWith({
|
||||
withDataDir: true,
|
||||
vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '0' },
|
||||
})
|
||||
checkFilesAreAccessible()
|
||||
|
||||
describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL=1', function () {
|
||||
startWith({
|
||||
withDataDir: true,
|
||||
vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '1' },
|
||||
})
|
||||
checkFilesAreAccessible()
|
||||
|
||||
describe('OVERLEAF_FILESTORE_MIGRATION_LEVEL=2', function () {
|
||||
startWith({
|
||||
withDataDir: true,
|
||||
vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '1' },
|
||||
})
|
||||
before(async function () {
|
||||
await runScript({
|
||||
cwd: 'services/history-v1',
|
||||
script: 'storage/scripts/back_fill_file_hash.mjs',
|
||||
})
|
||||
})
|
||||
startWith({
|
||||
withDataDir: true,
|
||||
vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '2' },
|
||||
})
|
||||
checkFilesAreAccessible()
|
||||
|
||||
describe('purge filestore data', function () {
|
||||
before(async function () {
|
||||
await purgeFilestoreData()
|
||||
})
|
||||
checkFilesAreAccessible()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
|
@ -107,7 +107,7 @@ describe('git-bridge', function () {
|
|||
cy.get('code').contains(`git clone ${gitURL(id.toString())}`)
|
||||
})
|
||||
cy.findByText('Generate token').should('not.exist')
|
||||
cy.findByText(/generate a new one in Account settings/)
|
||||
cy.findByText(/generate a new one in Account settings/i)
|
||||
cy.findByText('Go to settings')
|
||||
.should('have.attr', 'target', '_blank')
|
||||
.and('have.attr', 'href', '/user/settings')
|
||||
|
|
|
@ -9,14 +9,6 @@ export function throttledRecompile() {
|
|||
return recompile
|
||||
}
|
||||
|
||||
export function stopCompile(options: { delay?: number } = {}) {
|
||||
const { delay = 0 } = options
|
||||
cy.wait(delay)
|
||||
cy.log('Stop compile')
|
||||
cy.findByRole('button', { name: 'Toggle compile options menu' }).click()
|
||||
cy.findByRole('menuitem', { name: 'Stop compilation' }).click()
|
||||
}
|
||||
|
||||
export function prepareWaitForNextCompileSlot() {
|
||||
let lastCompile = 0
|
||||
function queueReset() {
|
||||
|
|
|
@ -9,7 +9,6 @@ export function isExcludedBySharding(
|
|||
| 'CE_DEFAULT'
|
||||
| 'CE_CUSTOM_1'
|
||||
| 'CE_CUSTOM_2'
|
||||
| 'CE_CUSTOM_3'
|
||||
| 'PRO_DEFAULT_1'
|
||||
| 'PRO_DEFAULT_2'
|
||||
| 'PRO_CUSTOM_1'
|
||||
|
|
|
@ -85,12 +85,6 @@ export async function getRedisKeys() {
|
|||
return stdout.split('\n')
|
||||
}
|
||||
|
||||
export async function purgeFilestoreData() {
|
||||
await fetchJSON(`${hostAdminURL}/data/user_files`, {
|
||||
method: 'DELETE',
|
||||
})
|
||||
}
|
||||
|
||||
async function sleep(ms: number) {
|
||||
return new Promise(resolve => {
|
||||
setTimeout(resolve, ms)
|
||||
|
|
|
@ -37,8 +37,7 @@ export function createProject(
|
|||
}
|
||||
cy.findAllByRole('button').contains(newProjectButtonMatcher).click()
|
||||
// FIXME: This should only look in the left menu
|
||||
// The upgrading tests create projects in older versions of Server Pro which used different casing of the project type. Use case-insensitive match.
|
||||
cy.findAllByText(type, { exact: false }).first().click()
|
||||
cy.findAllByText(new RegExp(type, 'i')).first().click()
|
||||
cy.findByRole('dialog').within(() => {
|
||||
cy.get('input').type(name)
|
||||
cy.findByText('Create').click()
|
||||
|
@ -216,43 +215,3 @@ export function createNewFile() {
|
|||
|
||||
return fileName
|
||||
}
|
||||
|
||||
export function prepareFileUploadTest(binary = false) {
|
||||
const name = `${uuid()}.txt`
|
||||
const content = `Test File Content ${name}${binary ? ' \x00' : ''}`
|
||||
cy.get('button').contains('Upload').click({ force: true })
|
||||
cy.get('input[type=file]')
|
||||
.first()
|
||||
.selectFile(
|
||||
{
|
||||
contents: Cypress.Buffer.from(content),
|
||||
fileName: name,
|
||||
lastModified: Date.now(),
|
||||
},
|
||||
{ force: true }
|
||||
)
|
||||
|
||||
// wait for the upload to finish
|
||||
cy.findByRole('treeitem', { name })
|
||||
|
||||
return function check() {
|
||||
cy.findByRole('treeitem', { name }).click()
|
||||
if (binary) {
|
||||
cy.findByText(content).should('not.have.class', 'cm-line')
|
||||
} else {
|
||||
cy.findByText(content).should('have.class', 'cm-line')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function testNewFileUpload() {
|
||||
it('can upload text file', () => {
|
||||
const check = prepareFileUploadTest(false)
|
||||
check()
|
||||
})
|
||||
|
||||
it('can upload binary file', () => {
|
||||
const check = prepareFileUploadTest(true)
|
||||
check()
|
||||
})
|
||||
}
|
||||
|
|
|
@ -29,17 +29,6 @@ const IMAGES = {
|
|||
PRO: process.env.IMAGE_TAG_PRO.replace(/:.+/, ''),
|
||||
}
|
||||
|
||||
function defaultDockerComposeOverride() {
|
||||
return {
|
||||
services: {
|
||||
sharelatex: {
|
||||
environment: {},
|
||||
},
|
||||
'git-bridge': {},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
let previousConfig = ''
|
||||
|
||||
function readDockerComposeOverride() {
|
||||
|
@ -49,7 +38,14 @@ function readDockerComposeOverride() {
|
|||
if (error.code !== 'ENOENT') {
|
||||
throw error
|
||||
}
|
||||
return defaultDockerComposeOverride
|
||||
return {
|
||||
services: {
|
||||
sharelatex: {
|
||||
environment: {},
|
||||
},
|
||||
'git-bridge': {},
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -81,21 +77,12 @@ app.use(bodyParser.json())
|
|||
app.use((req, res, next) => {
|
||||
// Basic access logs
|
||||
console.log(req.method, req.url, req.body)
|
||||
const json = res.json
|
||||
res.json = body => {
|
||||
console.log(req.method, req.url, req.body, '->', body)
|
||||
json.call(res, body)
|
||||
}
|
||||
next()
|
||||
})
|
||||
app.use((req, res, next) => {
|
||||
// Add CORS headers
|
||||
const accessControlAllowOrigin =
|
||||
process.env.ACCESS_CONTROL_ALLOW_ORIGIN || 'http://sharelatex'
|
||||
res.setHeader('Access-Control-Allow-Origin', accessControlAllowOrigin)
|
||||
res.setHeader('Access-Control-Allow-Headers', 'Content-Type')
|
||||
res.setHeader('Access-Control-Max-Age', '3600')
|
||||
res.setHeader('Access-Control-Allow-Methods', 'DELETE, GET, HEAD, POST, PUT')
|
||||
next()
|
||||
})
|
||||
|
||||
|
@ -146,7 +133,6 @@ const allowedVars = Joi.object(
|
|||
'V1_HISTORY_URL',
|
||||
'SANDBOXED_COMPILES',
|
||||
'ALL_TEX_LIVE_DOCKER_IMAGE_NAMES',
|
||||
'OVERLEAF_FILESTORE_MIGRATION_LEVEL',
|
||||
'OVERLEAF_TEMPLATES_USER_ID',
|
||||
'OVERLEAF_NEW_PROJECT_TEMPLATE_LINKS',
|
||||
'OVERLEAF_ALLOW_PUBLIC_ACCESS',
|
||||
|
@ -333,19 +319,8 @@ app.get('/redis/keys', (req, res) => {
|
|||
)
|
||||
})
|
||||
|
||||
app.delete('/data/user_files', (req, res) => {
|
||||
runDockerCompose(
|
||||
'exec',
|
||||
['sharelatex', 'rm', '-rf', '/var/lib/overleaf/data/user_files'],
|
||||
(error, stdout, stderr) => {
|
||||
res.json({ error, stdout, stderr })
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
app.use(handleValidationErrors())
|
||||
|
||||
purgeDataDir()
|
||||
writeDockerComposeOverride(defaultDockerComposeOverride())
|
||||
|
||||
app.listen(80)
|
||||
|
|
|
@ -44,9 +44,8 @@ describe('Project List', () => {
|
|||
cy.findByRole('button', { name: 'Download .zip file' }).click()
|
||||
)
|
||||
|
||||
const zipName = projectName.replaceAll('-', '_')
|
||||
cy.task('readFileInZip', {
|
||||
pathToZip: `cypress/downloads/${zipName}.zip`,
|
||||
pathToZip: `cypress/downloads/${projectName}.zip`,
|
||||
fileToRead: 'main.tex',
|
||||
}).should('contain', 'Your introduction goes here')
|
||||
})
|
||||
|
|
|
@ -55,15 +55,8 @@ describe('Project Sharing', function () {
|
|||
|
||||
function expectContentReadOnlyAccess() {
|
||||
cy.url().should('match', /\/project\/[a-fA-F0-9]{24}/)
|
||||
cy.findByRole('textbox', { name: /Source Editor editing/i }).should(
|
||||
'contain.text',
|
||||
'\\maketitle'
|
||||
)
|
||||
cy.findByRole('textbox', { name: /Source Editor editing/i }).should(
|
||||
'have.attr',
|
||||
'contenteditable',
|
||||
'false'
|
||||
)
|
||||
cy.get('.cm-content').should('contain.text', '\\maketitle')
|
||||
cy.get('.cm-content').should('have.attr', 'contenteditable', 'false')
|
||||
}
|
||||
|
||||
function expectContentWriteAccess() {
|
||||
|
@ -71,23 +64,13 @@ describe('Project Sharing', function () {
|
|||
cy.url().should('match', /\/project\/[a-fA-F0-9]{24}/)
|
||||
const recompile = throttledRecompile()
|
||||
// wait for the editor to finish loading
|
||||
cy.findByRole('textbox', { name: /Source Editor editing/i }).should(
|
||||
'contain.text',
|
||||
'\\maketitle'
|
||||
)
|
||||
cy.get('.cm-content').should('contain.text', '\\maketitle')
|
||||
// the editor should be writable
|
||||
cy.findByRole('textbox', { name: /Source Editor editing/i }).should(
|
||||
'have.attr',
|
||||
'contenteditable',
|
||||
'true'
|
||||
)
|
||||
cy.get('.cm-content').should('have.attr', 'contenteditable', 'true')
|
||||
cy.findByText('\\maketitle').parent().click()
|
||||
cy.findByText('\\maketitle').parent().type(`\n\\section{{}${section}}`)
|
||||
// should have written
|
||||
cy.findByRole('textbox', { name: /Source Editor editing/i }).should(
|
||||
'contain.text',
|
||||
`\\section{${section}}`
|
||||
)
|
||||
cy.get('.cm-content').should('contain.text', `\\section{${section}}`)
|
||||
// check PDF
|
||||
recompile()
|
||||
cy.get('.pdf-viewer').should('contain.text', projectName)
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import { ensureUserExists, login } from './helpers/login'
|
||||
import { createProject } from './helpers/project'
|
||||
import { isExcludedBySharding, startWith } from './helpers/config'
|
||||
import { throttledRecompile, stopCompile } from './helpers/compile'
|
||||
import { throttledRecompile } from './helpers/compile'
|
||||
import { v4 as uuid } from 'uuid'
|
||||
import { waitUntilScrollingFinished } from './helpers/waitUntilScrollingFinished'
|
||||
import { beforeWithReRunOnTestRetry } from './helpers/beforeWithReRunOnTestRetry'
|
||||
|
@ -56,40 +56,8 @@ describe('SandboxedCompiles', function () {
|
|||
checkSyncTeX()
|
||||
checkXeTeX()
|
||||
checkRecompilesAfterErrors()
|
||||
checkStopCompile()
|
||||
})
|
||||
|
||||
function checkStopCompile() {
|
||||
it('users can stop a running compile', function () {
|
||||
login('user@example.com')
|
||||
createProject('test-project')
|
||||
// create an infinite loop in the main document
|
||||
// this will cause the compile to run indefinitely
|
||||
cy.findByText('\\maketitle').parent().click()
|
||||
cy.findByText('\\maketitle')
|
||||
.parent()
|
||||
.type('\n\\def\\x{{}Hello!\\par\\x}\\x')
|
||||
cy.log('Start compile')
|
||||
// We need to start the compile manually because we do not want to wait for it to finish
|
||||
cy.findByText('Recompile').click()
|
||||
// Now stop the compile and kill the latex process
|
||||
stopCompile({ delay: 1000 })
|
||||
cy.get('.logs-pane')
|
||||
.invoke('text')
|
||||
.should('match', /PDF Rendering Error|Compilation cancelled/)
|
||||
// Check that the previous compile is not running in the background by
|
||||
// disabling the infinite loop and recompiling
|
||||
cy.findByText('\\def').parent().click()
|
||||
cy.findByText('\\def').parent().type('{home}disabled loop% ')
|
||||
cy.findByText('Recompile').click()
|
||||
cy.get('.pdf-viewer').should('contain.text', 'disabled loop')
|
||||
cy.get('.logs-pane').should(
|
||||
'not.contain.text',
|
||||
'A previous compile is still running'
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
function checkSyncTeX() {
|
||||
// TODO(25342): re-enable
|
||||
// eslint-disable-next-line mocha/no-skipped-tests
|
||||
|
@ -161,9 +129,7 @@ describe('SandboxedCompiles', function () {
|
|||
})
|
||||
|
||||
cy.log('navigate to Section A')
|
||||
cy.findByRole('textbox', { name: /Source Editor editing/i }).within(
|
||||
() => cy.findByText('Section A').click()
|
||||
)
|
||||
cy.get('.cm-content').within(() => cy.findByText('Section A').click())
|
||||
cy.get('[aria-label="Go to code location in PDF"]').click()
|
||||
cy.get('@title').then((title: any) => {
|
||||
waitUntilScrollingFinished('.pdfjs-viewer-inner', title)
|
||||
|
@ -172,9 +138,7 @@ describe('SandboxedCompiles', function () {
|
|||
})
|
||||
|
||||
cy.log('navigate to Section B')
|
||||
cy.findByRole('textbox', { name: /Source Editor editing/i }).within(
|
||||
() => cy.findByText('Section B').click()
|
||||
)
|
||||
cy.get('.cm-content').within(() => cy.findByText('Section B').click())
|
||||
cy.get('[aria-label="Go to code location in PDF"]').click()
|
||||
cy.get('@sectionA').then((title: any) => {
|
||||
waitUntilScrollingFinished('.pdfjs-viewer-inner', title)
|
||||
|
@ -263,7 +227,6 @@ describe('SandboxedCompiles', function () {
|
|||
checkSyncTeX()
|
||||
checkXeTeX()
|
||||
checkRecompilesAfterErrors()
|
||||
checkStopCompile()
|
||||
})
|
||||
|
||||
describe.skip('unavailable in CE', function () {
|
||||
|
@ -278,6 +241,5 @@ describe('SandboxedCompiles', function () {
|
|||
checkSyncTeX()
|
||||
checkXeTeX()
|
||||
checkRecompilesAfterErrors()
|
||||
checkStopCompile()
|
||||
})
|
||||
})
|
||||
|
|
|
@@ -1 +1 @@
-22.17.0
+22.15.1
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
# Instead run bin/update_build_scripts from
|
||||
# https://github.com/overleaf/internal/
|
||||
|
||||
-FROM node:22.17.0 AS base
+FROM node:22.15.1 AS base
|
||||
|
||||
WORKDIR /overleaf/services/chat
|
||||
|
||||
|
|
|
@ -32,12 +32,12 @@ HERE=$(shell pwd)
|
|||
MONOREPO=$(shell cd ../../ && pwd)
|
||||
# Run the linting commands in the scope of the monorepo.
|
||||
# Eslint and prettier (plus some configs) are on the root.
|
||||
RUN_LINTING = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(HERE) node:22.17.0 npm run --silent
|
||||
RUN_LINTING = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(HERE) node:22.15.1 npm run --silent
|
||||
|
||||
RUN_LINTING_CI = docker run --rm --volume $(MONOREPO)/.editorconfig:/overleaf/.editorconfig --volume $(MONOREPO)/.eslintignore:/overleaf/.eslintignore --volume $(MONOREPO)/.eslintrc:/overleaf/.eslintrc --volume $(MONOREPO)/.prettierignore:/overleaf/.prettierignore --volume $(MONOREPO)/.prettierrc:/overleaf/.prettierrc --volume $(MONOREPO)/tsconfig.backend.json:/overleaf/tsconfig.backend.json ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) npm run --silent
|
||||
|
||||
# Same but from the top of the monorepo
|
||||
RUN_LINTING_MONOREPO = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(MONOREPO) node:22.17.0 npm run --silent
|
||||
RUN_LINTING_MONOREPO = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(MONOREPO) node:22.15.1 npm run --silent
|
||||
|
||||
SHELLCHECK_OPTS = \
|
||||
--shell=bash \
|
||||
|
|
|
@ -4,6 +4,6 @@ chat
|
|||
--env-add=
|
||||
--env-pass-through=
|
||||
--esmock-loader=False
|
||||
--node-version=22.17.0
|
||||
--node-version=22.15.1
|
||||
--public-repo=False
|
||||
--script-version=4.7.0
|
||||
|
|
|
@ -24,13 +24,10 @@ services:
|
|||
MOCHA_GREP: ${MOCHA_GREP}
|
||||
NODE_ENV: test
|
||||
NODE_OPTIONS: "--unhandled-rejections=strict"
|
||||
volumes:
|
||||
- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
|
||||
depends_on:
|
||||
mongo:
|
||||
condition: service_started
|
||||
user: node
|
||||
entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
|
||||
command: npm run test:acceptance
|
||||
|
||||
|
||||
|
@ -42,7 +39,7 @@ services:
|
|||
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
|
||||
user: root
|
||||
mongo:
|
||||
image: mongo:8.0.11
|
||||
image: mongo:7.0.20
|
||||
command: --replSet overleaf
|
||||
volumes:
|
||||
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@ -6,7 +6,7 @@ version: "2.3"
|
|||
|
||||
services:
|
||||
test_unit:
|
||||
image: node:22.17.0
|
||||
image: node:22.15.1
|
||||
volumes:
|
||||
- .:/overleaf/services/chat
|
||||
- ../../node_modules:/overleaf/node_modules
|
||||
|
@ -21,12 +21,11 @@ services:
|
|||
user: node
|
||||
|
||||
test_acceptance:
|
||||
image: node:22.17.0
|
||||
image: node:22.15.1
|
||||
volumes:
|
||||
- .:/overleaf/services/chat
|
||||
- ../../node_modules:/overleaf/node_modules
|
||||
- ../../libraries:/overleaf/libraries
|
||||
- ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
|
||||
working_dir: /overleaf/services/chat
|
||||
environment:
|
||||
ELASTIC_SEARCH_DSN: es:9200
|
||||
|
@ -40,11 +39,10 @@ services:
|
|||
depends_on:
|
||||
mongo:
|
||||
condition: service_started
|
||||
entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=0 --
|
||||
command: npm run --silent test:acceptance
|
||||
|
||||
mongo:
|
||||
image: mongo:8.0.11
|
||||
image: mongo:7.0.20
|
||||
command: --replSet overleaf
|
||||
volumes:
|
||||
- ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
|
||||
|
|
|
@@ -1 +1 @@
-22.17.0
+22.15.1
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
# Instead run bin/update_build_scripts from
|
||||
# https://github.com/overleaf/internal/
|
||||
|
||||
-FROM node:22.17.0 AS base
+FROM node:22.15.1 AS base
|
||||
|
||||
WORKDIR /overleaf/services/clsi
|
||||
COPY services/clsi/install_deps.sh /overleaf/services/clsi/
|
||||
|
|
Some files were not shown because too many files have changed in this diff.