diff --git a/.eslintrc b/.eslintrc
new file mode 100644
index 0000000000..1efb79d093
--- /dev/null
+++ b/.eslintrc
@@ -0,0 +1,17 @@
+{
+ "extends": [
+ "eslint:recommended",
+ "standard",
+ "prettier"
+ ],
+ "parserOptions": {
+ "ecmaVersion": 2018
+ },
+ "env": {
+ "node": true
+ },
+ "rules": {
+ // Do not allow importing of implicit dependencies.
+ "import/no-extraneous-dependencies": "error"
+ }
+}
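A quick illustration (not part of the diff) of what the import/no-extraneous-dependencies rule catches: requiring a package that is not declared in the service's package.json now fails the lint step. The module name below is hypothetical.

    // lint error under the config above if 'lodash' is absent from package.json:
    const _ = require('lodash')
    // eslint: 'lodash' should be listed in the project's dependencies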
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000000..c6f98d843d
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,17 @@
+version: 2
+updates:
+ - package-ecosystem: "npm"
+ directory: "/"
+ schedule:
+ interval: "daily"
+
+ pull-request-branch-name:
+ # Separate sections of the branch name with a hyphen
+ # Docker images use the branch name and do not support slashes in tags
+ # https://github.com/overleaf/google-ops/issues/822
+ # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#pull-request-branch-nameseparator
+ separator: "-"
+
+ # Block regular version upgrades -- security upgrades use a separate queue.
+ # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#open-pull-requests-limit
+ open-pull-requests-limit: 0
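With separator "-", Dependabot opens branches named along the lines of dependabot-npm_and_yarn-lodash-4.17.19 (illustrative) instead of the default slash-separated form, so the branch name can double as a valid Docker image tag.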
diff --git a/.prettierrc b/.prettierrc
new file mode 100644
index 0000000000..e692368d0e
--- /dev/null
+++ b/.prettierrc
@@ -0,0 +1,8 @@
+{
+ "arrowParens": "avoid",
+ "semi": false,
+ "singleQuote": true,
+ "trailingComma": "es5",
+ "tabWidth": 2,
+ "useTabs": false
+}
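A before/after sketch (illustrative only) of how these options format a JavaScript snippet:

    // input
    const greet = (name) => { return "hi, " + name; };
    // prettier output: semicolons dropped, single quotes, arrow parens avoided
    const greet = name => {
      return 'hi, ' + name
    }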
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 5d9f82d9fc..814c1c6775 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -4,7 +4,7 @@ Contributing to ShareLaTeX
Thank you for reading this! If you'd like to report a bug or join in the development
of ShareLaTeX, then here are some notes on how to do that.
-*Note that ShareLaTeX is actually made up of many seperate repositories (a list is available
+*Note that ShareLaTeX is actually made up of many separate repositories (a list is available
[here](https://github.com/sharelatex/sharelatex/blob/master/README.md#other-repositories)).*
Reporting bugs and opening issues
diff --git a/Dockerfile b/Dockerfile
index dce3f6d015..c5ce81ef24 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -5,60 +5,34 @@
ARG SHARELATEX_BASE_TAG=sharelatex/sharelatex-base:latest
FROM $SHARELATEX_BASE_TAG
-ENV SHARELATEX_CONFIG /etc/sharelatex/settings.coffee
-
+WORKDIR /var/www/sharelatex
# Add required source files
# -------------------------
-ADD ${baseDir}/bin /var/www/sharelatex/bin
-ADD ${baseDir}/doc /var/www/sharelatex/doc
-ADD ${baseDir}/migrations /var/www/sharelatex/migrations
-ADD ${baseDir}/tasks /var/www/sharelatex/tasks
-ADD ${baseDir}/Gruntfile.coffee /var/www/sharelatex/Gruntfile.coffee
-ADD ${baseDir}/package.json /var/www/sharelatex/package.json
-ADD ${baseDir}/npm-shrinkwrap.json /var/www/sharelatex/npm-shrinkwrap.json
-ADD ${baseDir}/services.js /var/www/sharelatex/config/services.js
-
-
-# Copy build dependencies
-# -----------------------
-ADD ${baseDir}/git-revision.sh /var/www/git-revision.sh
-ADD ${baseDir}/services.js /var/www/sharelatex/config/services.js
-
+ADD ${baseDir}/genScript.js /var/www/sharelatex/genScript.js
+ADD ${baseDir}/services.js /var/www/sharelatex/services.js
# Checkout services
# -----------------
-RUN cd /var/www/sharelatex \
-&& npm install \
-&& grunt install \
+RUN node genScript checkout | bash \
\
-# Cleanup not needed artifacts
-# ----------------------------
-&& rm -rf /root/.cache /root/.npm $(find /tmp/ -mindepth 1 -maxdepth 1) \
-# Stores the version installed for each service
+# Store the revision for each service
# ---------------------------------------------
-&& cd /var/www \
-&& ./git-revision.sh > revisions.txt \
+&& node genScript revisions | bash > /var/www/revisions.txt \
\
# Cleanup the git history
# -------------------
-&& rm -rf $(find /var/www/sharelatex -name .git)
+&& node genScript cleanup-git | bash
# Install npm dependencies
# ------------------------
-RUN cd /var/www/sharelatex \
-&& bash ./bin/install-services \
- \
-# Cleanup not needed artifacts
-# ----------------------------
-&& rm -rf /root/.cache /root/.npm $(find /tmp/ -mindepth 1 -maxdepth 1)
+RUN node genScript install | bash
-# Compile CoffeeScript
+# Compile
# --------------------
-RUN cd /var/www/sharelatex \
-&& bash ./bin/compile-services
+RUN node genScript compile | bash
-# Links CLSI sycntex to its default location
+# Links CLSI synctex to its default location
# ------------------------------------------
RUN ln -s /var/www/sharelatex/clsi/bin/synctex /opt/synctex
@@ -70,13 +44,14 @@ ADD ${baseDir}/runit /etc/service
# Configure nginx
# ---------------
-ADD ${baseDir}/nginx/nginx.conf /etc/nginx/nginx.conf
+ADD ${baseDir}/nginx/nginx.conf.template /etc/nginx/templates/nginx.conf.template
ADD ${baseDir}/nginx/sharelatex.conf /etc/nginx/sites-enabled/sharelatex.conf
# Configure log rotation
# ----------------------
ADD ${baseDir}/logrotate/sharelatex /etc/logrotate.d/sharelatex
+RUN chmod 644 /etc/logrotate.d/sharelatex
# Copy Phusion Image startup scripts to its location
@@ -85,10 +60,17 @@ COPY ${baseDir}/init_scripts/ /etc/my_init.d/
# Copy app settings files
# -----------------------
-COPY ${baseDir}/settings.coffee /etc/sharelatex/settings.coffee
+COPY ${baseDir}/settings.js /etc/sharelatex/settings.js
+
+# Copy grunt thin wrapper
+# -----------------------
+ADD ${baseDir}/bin/grunt /usr/local/bin/grunt
+RUN chmod +x /usr/local/bin/grunt
# Set Environment Variables
# --------------------------------
+ENV SHARELATEX_CONFIG /etc/sharelatex/settings.js
+
ENV WEB_API_USER "sharelatex"
ENV SHARELATEX_APP_NAME "Overleaf Community Edition"
diff --git a/Dockerfile-base b/Dockerfile-base
index 15ac43da33..0878d4a8e1 100644
--- a/Dockerfile-base
+++ b/Dockerfile-base
@@ -20,8 +20,8 @@ RUN apt-get update \
qpdf \
aspell aspell-en aspell-af aspell-am aspell-ar aspell-ar-large aspell-bg aspell-bn aspell-br aspell-ca aspell-cs aspell-cy aspell-da aspell-de aspell-el aspell-eo aspell-es aspell-et aspell-eu-es aspell-fa aspell-fo aspell-fr aspell-ga aspell-gl-minimos aspell-gu aspell-he aspell-hi aspell-hr aspell-hsb aspell-hu aspell-hy aspell-id aspell-is aspell-it aspell-kk aspell-kn aspell-ku aspell-lt aspell-lv aspell-ml aspell-mr aspell-nl aspell-nr aspell-ns aspell-pa aspell-pl aspell-pt aspell-pt-br aspell-ro aspell-ru aspell-sk aspell-sl aspell-ss aspell-st aspell-sv aspell-tl aspell-tn aspell-ts aspell-uk aspell-uz aspell-xh aspell-zu \
\
-# install Node.JS 10
-&& curl -sSL https://deb.nodesource.com/setup_10.x | bash - \
+# install Node.js 12
+&& curl -sSL https://deb.nodesource.com/setup_12.x | bash - \
&& apt-get install -y nodejs \
\
&& rm -rf \
@@ -30,11 +30,10 @@ RUN apt-get update \
/etc/nginx/sites-enabled/default \
/var/lib/apt/lists/*
-# Install Grunt
+# Add envsubst
# ------------
-RUN npm install -g \
- grunt-cli \
-&& rm -rf /root/.npm
+ADD ./vendor/envsubst /usr/bin/envsubst
+RUN chmod +x /usr/bin/envsubst
# Install TexLive
# ---------------
@@ -46,7 +45,7 @@ RUN npm install -g \
# -f Dockerfile-base -t sharelatex/sharelatex-base .
ARG TEXLIVE_MIRROR=http://mirror.ctan.org/systems/texlive/tlnet
-ENV PATH "${PATH}:/usr/local/texlive/2020/bin/x86_64-linux"
+ENV PATH "${PATH}:/usr/local/texlive/2021/bin/x86_64-linux"
RUN mkdir /install-tl-unx \
&& curl -sSL \
diff --git a/Gruntfile.coffee b/Gruntfile.coffee
deleted file mode 100644
index 7fb8881930..0000000000
--- a/Gruntfile.coffee
+++ /dev/null
@@ -1,232 +0,0 @@
-coffee = require("coffee-script")
-fs = require "fs"
-spawn = require("child_process").spawn
-exec = require("child_process").exec
-rimraf = require "rimraf"
-Path = require "path"
-semver = require "semver"
-knox = require "knox"
-crypto = require "crypto"
-async = require "async"
-settings = require("settings-sharelatex")
-_ = require("underscore")
-
-
-SERVICES = require("./config/services")
-
-module.exports = (grunt) ->
- grunt.loadNpmTasks 'grunt-bunyan'
- grunt.loadNpmTasks 'grunt-execute'
- grunt.loadNpmTasks 'grunt-available-tasks'
- grunt.loadNpmTasks 'grunt-concurrent'
- grunt.loadNpmTasks "grunt-contrib-coffee"
- grunt.loadNpmTasks "grunt-shell"
-
- grunt.task.loadTasks "./tasks"
-
- execute = {}
- for service in SERVICES
- execute[service.name] =
- src: "#{service.name}/app.js"
-
- grunt.initConfig
- execute: execute
-
- concurrent:
- all:
- tasks: ("run:#{service.name}" for service in SERVICES)
- options:
- limit: SERVICES.length
- logConcurrentOutput: true
- coffee:
- migrate:
- expand: true,
- flatten: false,
- cwd: './',
- src: ['./migrations/*.coffee'],
- dest: './',
- ext: '.js'
- options:
- bare:true
-
- shell:
- migrate:
- command: "./node_modules/east/bin/east migrate --adapter east-mongo --url #{settings?.mongo?.url}"
-
- availabletasks:
- tasks:
- options:
- filter: 'exclude',
- tasks: [
- 'concurrent'
- 'execute'
- 'bunyan'
- 'availabletasks'
- ]
- groups:
- "Run tasks": [
- "run"
- "run:all"
- "default"
- ].concat ("run:#{service.name}" for service in SERVICES)
- "Misc": [
- "help"
- ]
- "Install tasks": ("install:#{service.name}" for service in SERVICES).concat(["install:all", "install"])
- "Update tasks": ("update:#{service.name}" for service in SERVICES).concat(["update:all", "update"])
- "Checks": ["check", "check:redis", "check:latexmk", "check:s3", "check:make", "check:mongo"]
-
- for service in SERVICES
- do (service) ->
- grunt.registerTask "install:#{service.name}", "Download and set up the #{service.name} service", () ->
- done = @async()
- Helpers.installService(service, done)
-
-
-
- grunt.registerTask 'install:all', "Download and set up all ShareLaTeX services",
- [].concat(
- ("install:#{service.name}" for service in SERVICES)
- ).concat(['postinstall'])
-
- grunt.registerTask 'install', 'install:all'
- grunt.registerTask 'postinstall', 'Explain postinstall steps', () ->
- Helpers.postinstallMessage @async()
-
- grunt.registerTask 'update:all', "Checkout and update all ShareLaTeX services",
- ["check:make"].concat(
- ("update:#{service.name}" for service in SERVICES)
- )
- grunt.registerTask 'update', 'update:all'
- grunt.registerTask 'run', "Run all of the sharelatex processes", ['concurrent:all']
- grunt.registerTask 'run:all', 'run'
-
- grunt.registerTask 'help', 'Display this help list', 'availabletasks'
- grunt.registerTask 'default', 'run'
-
- grunt.registerTask "check:redis", "Check that redis is installed and running", () ->
- Helpers.checkRedisConnect @async()
-
- grunt.registerTask "check:mongo", "Check that mongo is installed", () ->
- Helpers.checkMongoConnect @async()
-
- grunt.registerTask "check", "Check that you have the required dependencies installed", ["check:redis", "check:mongo", "check:make"]
-
- grunt.registerTask "check:make", "Check that make is installed", () ->
- Helpers.checkMake @async()
-
- grunt.registerTask 'migrate', "compile migrations and run them", ["coffee:migrate", 'shell:migrate']
-
-
- Helpers =
- installService: (service, callback = (error) ->) ->
- console.log "Installing #{service.name}"
- Helpers.cloneGitRepo service, (error) ->
- if error?
- callback(error)
- else
- callback()
-
- cloneGitRepo: (service, callback = (error) ->) ->
- repo_src = service.repo
- dir = service.name
- if !fs.existsSync(dir)
- proc = spawn "git", [
- "clone",
- repo_src,
- dir
- ], stdio: "inherit"
- proc.on "close", () ->
- Helpers.checkoutVersion service, callback
- else
- console.log "#{dir} already installed, skipping."
- callback()
-
- checkoutVersion: (service, callback = (error) ->) ->
- dir = service.name
- grunt.log.write "checking out #{service.name} #{service.version}"
- proc = spawn "git", ["checkout", service.version], stdio: "inherit", cwd: dir
- proc.on "close", () ->
- callback()
-
- postinstallMessage: (callback = (error) ->) ->
- grunt.log.write """
- Services cloned:
- #{service.name for service in SERVICES}
- To install services run:
- $ source bin/install-services
- This will install the required node versions and run `npm install` for each service.
- See https://github.com/sharelatex/sharelatex/pull/549 for more info.
- """
- callback()
-
- checkMake: (callback = (error) ->) ->
- grunt.log.write "Checking make is installed... "
- exec "make --version", (error, stdout, stderr) ->
- if error? and error.message.match("not found")
- grunt.log.error "FAIL."
- grunt.log.errorlns """
- Either make is not installed or is not in your path.
-
- On Ubuntu you can install make with:
-
- sudo apt-get install build-essential
-
- """
- return callback(error)
- else if error?
- return callback(error)
- else
- grunt.log.write "OK."
- return callback()
- checkMongoConnect: (callback = (error) ->) ->
- grunt.log.write "Checking can connect to mongo"
- mongojs = require("mongojs")
- db = mongojs(settings.mongo.url, ["tags"])
- db.runCommand { ping: 1 }, (err, res) ->
- if !err and res.ok
- grunt.log.write "OK."
- return callback()
- db.on 'error', (err)->
- err = "Can not connect to mongodb"
- grunt.log.error "FAIL."
- grunt.log.errorlns """
- !!!!!!!!!!!!!! MONGO ERROR !!!!!!!!!!!!!!
-
- ShareLaTeX can not talk to the mongdb instance
-
- Check the mongodb instance is running and accessible on env var SHARELATEX_MONGO_URL
-
- !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- """
- throw new Error("Can not connect to Mongodb")
- return callback(err)
-
- checkRedisConnect: (callback = (error) ->) ->
- grunt.log.write "Checking can connect to redis\n"
- rclient = require("redis").createClient(settings.redis.web)
-
- rclient.ping (err, res) ->
- if !err?
- grunt.log.write "OK."
- else
- throw new Error("Can not connect to redis")
- return callback()
- errorHandler = _.once (err)->
- err = "Can not connect to redis"
- grunt.log.error "FAIL."
- grunt.log.errorlns """
- !!!!!!!!!!!!!! REDIS ERROR !!!!!!!!!!!!!!
-
- ShareLaTeX can not talk to the redis instance
-
- Check the redis instance is running and accessible on env var SHARELATEX_REDIS_HOST
-
- !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- """
- throw new Error("Can not connect to redis")
- return callback(err)
- rclient.on 'error', errorHandler
-
-
-
diff --git a/Makefile b/Makefile
index 55eca07ac2..4896799f31 100644
--- a/Makefile
+++ b/Makefile
@@ -2,9 +2,11 @@
SHARELATEX_BASE_TAG := sharelatex/sharelatex-base
SHARELATEX_TAG := sharelatex/sharelatex
+SHARELATEX_BASE_CACHE := $(shell echo $(SHARELATEX_BASE_TAG) | sed -E 's/(.+):.+/\1:latest/')
build-base:
- docker build -f Dockerfile-base -t $(SHARELATEX_BASE_TAG) .
+ docker pull $(SHARELATEX_BASE_CACHE)
+ docker build -f Dockerfile-base --pull --cache-from $(SHARELATEX_BASE_CACHE) -t $(SHARELATEX_BASE_TAG) .
build-community:
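The sed expression maps whatever tag SHARELATEX_BASE_TAG carries to :latest -- for example, a hypothetical sharelatex/sharelatex-base:2.6.0 becomes sharelatex/sharelatex-base:latest -- so each base build can seed Docker's layer cache from the most recently published base image before rebuilding.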
diff --git a/README.md b/README.md
index 0edb0a6b8d..1d7737608c 100644
--- a/README.md
+++ b/README.md
@@ -8,20 +8,23 @@
Key Features •
Wiki •
- Server Pro •
+ Server Pro •
Contributing •
- Mailing List •
+ Mailing List •
Authors •
License
+
+ Figure 1: A screenshot of Overleaf Server Pro's comments and tracked changes features.
+
## Key Features
-[Overleaf](https://www.overleaf.com) is an open-source online real-time collaborative LaTeX editor. We run a hosted version at http://www.overleaf.com, but you can also run your own local version, and contribute to the development of Overleaf.
+[Overleaf](https://www.overleaf.com) is an open-source online real-time collaborative LaTeX editor. We run a hosted version at [www.overleaf.com](https://www.overleaf.com), but you can also run your own local version, and contribute to the development of Overleaf.
-*[If you want help installing and maintaining Overleaf at your university or workplace, we offer an officially supported version called Overleaf Server Pro. It also comes with extra security and admin features. Click here to find out more!](https://www.overleaf.com/university/onsite.html)*
+*[If you want help installing and maintaining Overleaf in your lab or workplace, we offer an officially supported version called Overleaf Server Pro. It also comes with extra security and admin features. Click here to find out more!](https://www.overleaf.com/for/enterprises)*
## Keeping up to date
@@ -39,7 +42,7 @@ If you are upgrading from a previous version of Overleaf, please see the [Releas
## Other repositories
-This repository does not contain any code. It acts a wrapper and toolkit for managing the many different Overleaf services. These each run as their own Node.js process and have their own Github repository. These are all downloaded and set up when you run `grunt install`
+This repository does not contain any code. It acts as a wrapper and toolkit for managing the many different Overleaf services. These each run as their own Node.js process and have their own GitHub repository.
| Service | Description |
| ------- | ----------- |
diff --git a/bin/compile-services b/bin/compile-services
deleted file mode 100755
index 76c432143a..0000000000
--- a/bin/compile-services
+++ /dev/null
@@ -1,23 +0,0 @@
-#! env bash
-
-set -e
-
-grep 'name:' config/services.js | \
- sed 's/.*name: "\(.*\)",/\1/' | \
- while read service
- do
- pushd $service
- echo "Compiling Service $service"
- case $service in
- web)
- npm run webpack:production
- ;;
- real-time)
- npm run compile:all
- ;;
- *)
- echo "$service doesn't require a compilation"
- ;;
- esac
- popd
- done
diff --git a/bin/grunt b/bin/grunt
new file mode 100755
index 0000000000..03f89b52c7
--- /dev/null
+++ b/bin/grunt
@@ -0,0 +1,31 @@
+#!/bin/bash
+# Thin wrapper around the old grunt tasks to ease migration.
+
+set -e
+TASK="$1"
+shift 1
+
+cd /var/www/sharelatex/web/modules/server-ce-scripts/scripts
+
+case "$TASK" in
+ user:create-admin)
+ node create-user --admin "$@"
+ ;;
+
+ user:delete)
+ node delete-user "$@"
+ ;;
+
+ check:mongo)
+ node check-mongodb
+ ;;
+
+ check:redis)
+ node check-redis
+ ;;
+
+ *)
+ echo "Unknown task $TASK"
+ exit 1
+ ;;
+esac
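This keeps old grunt-based documentation working: for example, a command such as grunt user:create-admin --email joe@example.com (flags illustrative; the wrapper forwards whatever arguments it receives) now dispatches to the create-user script in web's server-ce-scripts module.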
diff --git a/bin/install-services b/bin/install-services
deleted file mode 100755
index b688a4b4f8..0000000000
--- a/bin/install-services
+++ /dev/null
@@ -1,14 +0,0 @@
-#! env bash
-
-set -e
-
-grep 'name:' config/services.js | \
- sed 's/.*name: "\(.*\)",/\1/' | \
- while read service
- do
- pushd $service
- echo "Installing service $service"
- npm install --quiet
- popd
- done
-
diff --git a/docker-compose.yml b/docker-compose.yml
index dbef5f8882..58240a5687 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -19,7 +19,7 @@ services:
volumes:
- ~/sharelatex_data:/var/lib/sharelatex
########################################################################
- #### Server Pro: Un-comment the following line to mount the docker ####
+ #### Server Pro: Uncomment the following line to mount the docker ####
#### socket, required for Sibling Containers to work ####
########################################################################
# - /var/run/docker.sock:/var/run/docker.sock
@@ -69,7 +69,9 @@ services:
# SHARELATEX_EMAIL_SMTP_PASS:
# SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH: true
# SHARELATEX_EMAIL_SMTP_IGNORE_TLS: false
- # SHARELATEX_CUSTOM_EMAIL_FOOTER: "This system is run by department x
- "
+ # SHARELATEX_EMAIL_SMTP_NAME: '127.0.0.1'
+ # SHARELATEX_EMAIL_SMTP_LOGGER: true
+ # SHARELATEX_CUSTOM_EMAIL_FOOTER: "This system is run by department x"
################
## Server Pro ##
@@ -102,7 +104,7 @@ services:
mongo:
restart: always
- image: mongo
+ image: mongo:4.0
container_name: mongo
expose:
- 27017
diff --git a/genScript.js b/genScript.js
new file mode 100644
index 0000000000..e8f61f2ba2
--- /dev/null
+++ b/genScript.js
@@ -0,0 +1,62 @@
+const services = require('./services')
+
+console.log('#!/bin/bash')
+console.log('set -ex')
+
+switch (process.argv.pop()) {
+ case 'checkout':
+ for (const service of services) {
+ console.log(`git clone ${service.repo} ${service.name}`)
+ console.log(`git -C ${service.name} checkout ${service.version}`)
+ }
+ break
+ case 'revisions':
+ for (const service of services) {
+ console.log(`echo -n /var/www/sharelatex/${service.name},`)
+ console.log(`git -C ${service.name} rev-parse HEAD`)
+ }
+ break
+ case 'cleanup-git':
+ for (const service of services) {
+ console.log(`rm -rf ${service.name}/.git`)
+ }
+ break
+ case 'install':
+ for (const service of services) {
+ console.log('pushd', service.name)
+ switch (service.name) {
+ case 'web':
+ console.log('npm ci')
+ break
+ default:
+ // TODO(das7pad): revert back to npm ci --only=production (https://github.com/overleaf/issues/issues/4544)
+ console.log('npm ci')
+ }
+ console.log('popd')
+ }
+ break
+ case 'compile':
+ for (const service of services) {
+ console.log('pushd', service.name)
+ switch (service.name) {
+ case 'web':
+ console.log('npm run webpack:production')
+ // drop webpack/babel cache
+ console.log('rm -rf node_modules/.cache')
+ break
+ default:
+ console.log(`echo ${service.name} does not require a compilation`)
+ }
+ console.log('popd')
+ }
+ break
+ default:
+ console.error('unknown command')
+ console.log('exit 101')
+ process.exit(101)
+}
+
+console.log('set +x')
+console.log(
+ 'rm -rf /root/.cache /root/.npm $(find /tmp/ -mindepth 1 -maxdepth 1)'
+)
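For a hypothetical services.js entry such as { name: 'web', repo: 'https://github.com/overleaf/web.git', version: 'master' }, node genScript checkout prints a script along these lines, which the Dockerfile pipes straight into bash:

    #!/bin/bash
    set -ex
    git clone https://github.com/overleaf/web.git web
    git -C web checkout master
    set +x
    rm -rf /root/.cache /root/.npm $(find /tmp/ -mindepth 1 -maxdepth 1)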
diff --git a/git-revision.sh b/git-revision.sh
deleted file mode 100755
index e26f75bfd4..0000000000
--- a/git-revision.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-
-for gitDir in $(find "$PWD" -name .git); do
- echo -n "$(dirname ${gitDir}),"
- git --git-dir="$gitDir" rev-parse HEAD
-done
diff --git a/hotfix/2.3.1/Dockerfile b/hotfix/2.3.1/Dockerfile
new file mode 100644
index 0000000000..36f136aacc
--- /dev/null
+++ b/hotfix/2.3.1/Dockerfile
@@ -0,0 +1,7 @@
+FROM sharelatex/sharelatex:2.3.0
+
+
+# Patch: Fixes NPE when invoking synctex (https://github.com/overleaf/overleaf/issues/756)
+ADD check-clsi-setting-exists.patch /var/www/sharelatex/clsi/app/js/check-clsi-setting-exists.patch
+RUN cd /var/www/sharelatex/clsi/app/js && \
+ patch < check-clsi-setting-exists.patch
diff --git a/hotfix/2.3.1/check-clsi-setting-exists.patch b/hotfix/2.3.1/check-clsi-setting-exists.patch
new file mode 100644
index 0000000000..6f6535bc69
--- /dev/null
+++ b/hotfix/2.3.1/check-clsi-setting-exists.patch
@@ -0,0 +1,11 @@
+--- a/app/js/CompileManager.js
++++ b/app/js/CompileManager.js
+@@ -536,7 +536,7 @@ module.exports = CompileManager = {
+ compileName,
+ command,
+ directory,
+- Settings.clsi != null ? Settings.clsi.docker.image : undefined,
++ Settings.clsi && Settings.clsi.docker ? Settings.clsi.docker.image : undefined,
+ timeout,
+ {},
+ function(error, output) {
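The unpatched expression threw whenever Settings.clsi existed but Settings.clsi.docker did not; the patched guard behaves roughly like today's optional chaining, Settings.clsi?.docker?.image.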
diff --git a/hotfix/2.4.1/Dockerfile b/hotfix/2.4.1/Dockerfile
new file mode 100644
index 0000000000..d7655511c1
--- /dev/null
+++ b/hotfix/2.4.1/Dockerfile
@@ -0,0 +1,6 @@
+FROM sharelatex/sharelatex:2.4.0
+
+
+# Patch: Fixes missing dependencies on web startup (https://github.com/overleaf/overleaf/issues/767)
+RUN cd /var/www/sharelatex/web && \
+ npm install i18next@^19.6.3 i18next-fs-backend@^1.0.7 i18next-http-middleware@^3.0.2
diff --git a/hotfix/2.4.2/Dockerfile b/hotfix/2.4.2/Dockerfile
new file mode 100644
index 0000000000..640eea78c3
--- /dev/null
+++ b/hotfix/2.4.2/Dockerfile
@@ -0,0 +1,10 @@
+FROM sharelatex/sharelatex:2.4.1
+
+
+# Patch: Fixes anonymous read/write sharing
+COPY anonymous-metadata.patch ${baseDir}
+RUN cd ${baseDir} && patch -p0 < anonymous-metadata.patch
+
+# Patch: Fixes rendering of HTML text in the left footer
+COPY left-footer-skip-translation.patch ${baseDir}
+RUN cd ${baseDir} && patch -p0 < left-footer-skip-translation.patch
diff --git a/hotfix/2.4.2/anonymous-metadata.patch b/hotfix/2.4.2/anonymous-metadata.patch
new file mode 100644
index 0000000000..ea041abf9c
--- /dev/null
+++ b/hotfix/2.4.2/anonymous-metadata.patch
@@ -0,0 +1,43 @@
+--- /var/www/sharelatex/web/app/src/router.js 2020-09-14 20:21:39.741433000 +0000
++++ /var/www/sharelatex/web/app/src/router.js 2020-09-14 20:13:08.000000000 +0000
+@@ -607,16 +607,17 @@
+ ProjectDownloadsController.downloadMultipleProjects
+ )
+
++ console.log(`allowAnonymousReadAndWriteSharing: ${Settings.allowAnonymousReadAndWriteSharing}`)
+ webRouter.get(
+ '/project/:project_id/metadata',
+ AuthorizationMiddleware.ensureUserCanReadProject,
+- AuthenticationController.requireLogin(),
++ Settings.allowAnonymousReadAndWriteSharing ? (req, res, next) => { next() } : AuthenticationController.requireLogin(),
+ MetaController.getMetadata
+- )
++ )
+ webRouter.post(
+ '/project/:project_id/doc/:doc_id/metadata',
+ AuthorizationMiddleware.ensureUserCanReadProject,
+- AuthenticationController.requireLogin(),
++ Settings.allowAnonymousReadAndWriteSharing ? (req, res, next) => { next() } : AuthenticationController.requireLogin(),
+ MetaController.broadcastMetadataForDoc
+ )
+ privateApiRouter.post(
+--- /var/www/sharelatex/web/app/src/Features/Contacts/ContactRouter.js 2020-09-14 20:21:52.243779000 +0000
++++ /var/www/sharelatex/web/app/src/Features/Contacts/ContactRouter.js 2020-09-14 20:13:08.000000000 +0000
+@@ -5,6 +5,8 @@
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
++const Settings = require('settings-sharelatex')
++
+ const AuthenticationController = require('../Authentication/AuthenticationController')
+ const ContactController = require('./ContactController')
+
+@@ -12,7 +14,7 @@
+ apply(webRouter, apiRouter) {
+ return webRouter.get(
+ '/user/contacts',
+- AuthenticationController.requireLogin(),
++ Settings.allowAnonymousReadAndWriteSharing ? (req, res, next) => { next() } : AuthenticationController.requireLogin(),
+ ContactController.getContacts
+ )
+ }
diff --git a/hotfix/2.4.2/left-footer-skip-translation.patch b/hotfix/2.4.2/left-footer-skip-translation.patch
new file mode 100644
index 0000000000..ee6e33a417
--- /dev/null
+++ b/hotfix/2.4.2/left-footer-skip-translation.patch
@@ -0,0 +1,12 @@
+
+--- /var/www/sharelatex/web/app/views/layout/footer.pug
++++ /var/www/sharelatex/web/app/app/views/layout/footer.pug
+@@ -32,7 +32,7 @@ footer.site-footer
+ if item.url
+ a(href=item.url, class=item.class) !{translate(item.text)}
+ else
+- | !{translate(item.text)}
++ | !{item.text}
+
+ ul.col-md-3.text-right
+
diff --git a/hotfix/2.5.1/Dockerfile b/hotfix/2.5.1/Dockerfile
new file mode 100644
index 0000000000..d22f9123d1
--- /dev/null
+++ b/hotfix/2.5.1/Dockerfile
@@ -0,0 +1,13 @@
+FROM sharelatex/sharelatex:2.5.0
+
+# Patch #826: Fixes log path for contacts service to be picked up by logrotate
+COPY contacts-run.patch /etc/service/contacts-sharelatex
+RUN cd /etc/service/contacts-sharelatex && patch < contacts-run.patch
+
+# Patch #826: delete old logs for the contacts service
+COPY delete-old-logs.patch /etc/my_init.d
+RUN cd /etc/my_init.d && patch < delete-old-logs.patch \
+&& chmod +x /etc/my_init.d/10_delete_old_logs.sh
+
+# Patch #827: fix logrotate file permissions
+RUN chmod 644 /etc/logrotate.d/sharelatex
diff --git a/hotfix/2.5.1/contacts-run.patch b/hotfix/2.5.1/contacts-run.patch
new file mode 100644
index 0000000000..81ef36ecb0
--- /dev/null
+++ b/hotfix/2.5.1/contacts-run.patch
@@ -0,0 +1,8 @@
+--- a/run
++++ b/run
+@@ -7,4 +7,4 @@ if [ "$DEBUG_NODE" == "true" ]; then
+ NODE_PARAMS="--inspect=0.0.0.0:30360"
+ fi
+
+-exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/contacts/app.js >> /var/log/sharelatex/contacts 2>&1
++exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/contacts/app.js >> /var/log/sharelatex/contacts.log 2>&1
diff --git a/hotfix/2.5.1/delete-old-logs.patch b/hotfix/2.5.1/delete-old-logs.patch
new file mode 100644
index 0000000000..bc2be14adf
--- /dev/null
+++ b/hotfix/2.5.1/delete-old-logs.patch
@@ -0,0 +1,10 @@
+--- /dev/null
++++ b/10_delete_old_logs.sh
+@@ -0,0 +1,7 @@
++#!/bin/sh
++set -e
++
++# Up to version 2.5.0 the logs of the contacts service were written into a
++# file that was not picked up by logrotate.
++# The service is stable and we can safely discard any logs.
++rm -vf /var/log/sharelatex/contacts
diff --git a/hotfix/2.5.2/12_update_token_email.js b/hotfix/2.5.2/12_update_token_email.js
new file mode 100644
index 0000000000..e4d6e32254
--- /dev/null
+++ b/hotfix/2.5.2/12_update_token_email.js
@@ -0,0 +1,28 @@
+const Settings = require('settings-sharelatex')
+const mongojs = require('mongojs')
+const db = mongojs(Settings.mongo.url, ['tokens'])
+const async = require('async')
+
+exports.migrate = (client, done) => {
+ console.log(`>> Updating 'data.email' to lower case in tokens`)
+
+ db.tokens.find({}, { 'data.email': 1 }, (err, tokens) => {
+ if (err) {
+ return done(err)
+ }
+
+ async.eachSeries(
+ tokens,
+ (token, callback) => {
+ db.tokens.update(
+ { _id: token._id },
+ { $set: { 'data.email': token.data.email.toLowerCase() } },
+ callback
+ )
+ },
+ done
+ )
+ })
+}
+
+exports.rollback = (client, done) => done()
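The migrate/rollback pair follows the east migration interface used by the rest of this repository's migrations; rollback is a no-op here because lowercasing is lossy and the original mixed-case emails cannot be recovered.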
diff --git a/hotfix/2.5.2/Dockerfile b/hotfix/2.5.2/Dockerfile
new file mode 100644
index 0000000000..ddf596deea
--- /dev/null
+++ b/hotfix/2.5.2/Dockerfile
@@ -0,0 +1,8 @@
+FROM sharelatex/sharelatex:2.5.1
+
+# Patch: fixes registration token creation
+COPY create-token-lowercase-email.patch ${baseDir}
+RUN cd ${baseDir} && patch -p0 < create-token-lowercase-email.patch
+
+# Migration for tokens with invalid email addresses
+ADD 12_update_token_email.js /var/www/sharelatex/migrations/12_update_token_email.js
diff --git a/hotfix/2.5.2/create-token-lowercase-email.patch b/hotfix/2.5.2/create-token-lowercase-email.patch
new file mode 100644
index 0000000000..23dfaa3a43
--- /dev/null
+++ b/hotfix/2.5.2/create-token-lowercase-email.patch
@@ -0,0 +1,11 @@
+--- /var/www/sharelatex/web/app/src/Features/User/UserRegistrationHandler.js
++++ /var/www/sharelatex/web/app/src/Features/User/UserRegistrationHandler.js
+@@ -122,7 +122,7 @@ const UserRegistrationHandler = {
+ const ONE_WEEK = 7 * 24 * 60 * 60 // seconds
+ OneTimeTokenHandler.getNewToken(
+ 'password',
+- { user_id: user._id.toString(), email },
++ { user_id: user._id.toString(), email: user.email },
+ { expiresIn: ONE_WEEK },
+ (err, token) => {
+ if (err != null) {
diff --git a/hotfix/2.6.1/Dockerfile b/hotfix/2.6.1/Dockerfile
new file mode 100644
index 0000000000..6df467b09b
--- /dev/null
+++ b/hotfix/2.6.1/Dockerfile
@@ -0,0 +1,5 @@
+FROM sharelatex/sharelatex:2.6.0-RC1
+
+# Patch: fixes Project restore inserting a bad projectId into deletedFiles
+COPY document-deleter-object-id.patch ${baseDir}
+RUN cd ${baseDir} && patch -p0 < document-deleter-object-id.patch
diff --git a/hotfix/2.6.1/document-deleter-object-id.patch b/hotfix/2.6.1/document-deleter-object-id.patch
new file mode 100644
index 0000000000..a92ce49a13
--- /dev/null
+++ b/hotfix/2.6.1/document-deleter-object-id.patch
@@ -0,0 +1,10 @@
+--- /var/www/sharelatex/web/app/src/Features/Project/ProjectDeleter.js
++++ /var/www/sharelatex/web/app/src/Features/Project/ProjectDeleter.js
+@@ -278,6 +278,7 @@ async function deleteProject(projectId, options = {}) {
+ }
+
+ async function undeleteProject(projectId, options = {}) {
++ projectId = ObjectId(projectId)
+ let deletedProject = await DeletedProject.findOne({
+ 'deleterData.deletedProjectId': projectId
+ }).exec()
diff --git a/hotfix/2.6.2/Dockerfile b/hotfix/2.6.2/Dockerfile
new file mode 100644
index 0000000000..2df365143e
--- /dev/null
+++ b/hotfix/2.6.2/Dockerfile
@@ -0,0 +1,5 @@
+FROM sharelatex/sharelatex:2.6.1
+
+# Patch: fixes overleaf.com onboarding email being sent in CE/SP
+COPY onboarding-email.patch ${baseDir}
+RUN cd ${baseDir} && patch -p0 < onboarding-email.patch
diff --git a/hotfix/2.6.2/onboarding-email.patch b/hotfix/2.6.2/onboarding-email.patch
new file mode 100644
index 0000000000..2d1fed5686
--- /dev/null
+++ b/hotfix/2.6.2/onboarding-email.patch
@@ -0,0 +1,25 @@
+--- /var/www/sharelatex/web/app/src/Features/User/UserCreator.js
++++ /var/www/sharelatex/web/app/src/Features/User/UserCreator.js
+@@ -85,13 +85,15 @@ async function createNewUser(attributes, options = {}) {
+ }
+
+ Analytics.recordEvent(user._id, 'user-registered')
+- try {
+- await UserOnboardingEmailQueueManager.scheduleOnboardingEmail(user)
+- } catch (error) {
+- logger.error(
+- `Failed to schedule sending of onboarding email for user '${user._id}'`,
+- error
+- )
++ if(Features.hasFeature('saas')) {
++ try {
++ await UserOnboardingEmailQueueManager.scheduleOnboardingEmail(user)
++ } catch (error) {
++ logger.error(
++ `Failed to schedule sending of onboarding email for user '${user._id}'`,
++ error
++ )
++ }
+ }
+
+ return user
diff --git a/init_scripts/01_nginx_config_template.sh b/init_scripts/01_nginx_config_template.sh
new file mode 100755
index 0000000000..84f39ea637
--- /dev/null
+++ b/init_scripts/01_nginx_config_template.sh
@@ -0,0 +1,34 @@
+#!/bin/sh
+
+set -e
+
+## Generate nginx config files from templates,
+## with environment variables substituted
+
+nginx_dir='/etc/nginx'
+nginx_templates_dir="${nginx_dir}/templates"
+
+if ! [ -d "${nginx_templates_dir}" ]; then
+ echo "Nginx: no template directory found, skipping"
+ exit 0
+fi
+
+nginx_template_file="${nginx_templates_dir}/nginx.conf.template"
+nginx_config_file="${nginx_dir}/nginx.conf"
+
+if [ -f "${nginx_template_file}" ]; then
+ export NGINX_WORKER_PROCESSES="${NGINX_WORKER_PROCESSES:-4}"
+ export NGINX_WORKER_CONNECTIONS="${NGINX_WORKER_CONNECTIONS:-768}"
+
+ echo "Nginx: generating config file from template"
+
+ # Note the single quotes; they are important.
+ # This is a pass-list of env-vars that envsubst
+ # should operate on.
+ envsubst '${NGINX_WORKER_PROCESSES} ${NGINX_WORKER_CONNECTIONS}' \
+ < "${nginx_template_file}" \
+ > "${nginx_config_file}"
+
+ echo "Nginx: reloading config"
+ service nginx reload
+fi
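Assuming the template contains lines such as worker_processes ${NGINX_WORKER_PROCESSES};, envsubst substitutes only the two pass-listed variables and leaves nginx's own $-prefixed runtime variables (for example $host) untouched, which is why the pass-list matters.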
diff --git a/init_scripts/10_delete_old_logs.sh b/init_scripts/10_delete_old_logs.sh
new file mode 100755
index 0000000000..1b606dbf92
--- /dev/null
+++ b/init_scripts/10_delete_old_logs.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+set -e
+
+# Up to version 2.5.0 the logs of the contacts service were written into a
+# file that was not picked up by logrotate.
+# The service is stable and we can safely discard any logs.
+rm -vf /var/log/sharelatex/contacts
diff --git a/init_scripts/98_check_db_access.sh b/init_scripts/98_check_db_access.sh
index f8507f582f..aab69e24fe 100755
--- a/init_scripts/98_check_db_access.sh
+++ b/init_scripts/98_check_db_access.sh
@@ -2,6 +2,7 @@
set -e
echo "Checking can connect to mongo and redis"
-cd /var/www/sharelatex && grunt check:redis
-cd /var/www/sharelatex && grunt check:mongo
+cd /var/www/sharelatex/web/modules/server-ce-scripts/scripts
+node check-mongodb
+node check-redis
echo "All checks passed"
diff --git a/init_scripts/99_migrate.sh b/init_scripts/99_migrate.sh
deleted file mode 100755
index f880fa816f..0000000000
--- a/init_scripts/99_migrate.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/sh
-set -e
-
-which node
-which grunt
-ls -al /var/www/sharelatex/migrations
-cd /var/www/sharelatex && grunt migrate -v
-echo "All migrations finished"
diff --git a/init_scripts/99_run_web_migrations.sh b/init_scripts/99_run_web_migrations.sh
new file mode 100755
index 0000000000..a94ce18602
--- /dev/null
+++ b/init_scripts/99_run_web_migrations.sh
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+if [[ "${SHARELATEX_IS_SERVER_PRO:-null}" == "true" ]]; then
+ environment="server-pro"
+else
+ environment="server-ce"
+fi
+
+echo "Running migrations for $environment"
+cd /var/www/sharelatex/web
+npm run migrations -- migrate -t "$environment"
+echo "Finished migrations"
diff --git a/migrations/10_update_project_tokens.js b/migrations/10_update_project_tokens.js
deleted file mode 100644
index 57095308f8..0000000000
--- a/migrations/10_update_project_tokens.js
+++ /dev/null
@@ -1,109 +0,0 @@
-const Settings = require('settings-sharelatex')
-const Async = require('async')
-const mongojs = require('mongojs')
-const db = mongojs(Settings.mongo.url, ['users'])
-
-const indexKeys = { 'tokens.readAndWritePrefix': 1 }
-const indexOpts = {
- unique: true,
- partialFilterExpression: {
- 'tokens.readAndWritePrefix': { $exists: true }
- },
- background: true
-}
-
-// Index on Prefix
-const addReadAndWritePrefixIndex = (db, callback) => {
- db.projects.ensureIndex(indexKeys, indexOpts, callback)
-}
-
-const removeReadAndWritePrefixIndex = (db, callback) => {
- db.projects.dropIndex(indexKeys, callback)
-}
-
-// Extract prefix data
-const extractPrefix = (db, callback) => {
- db.projects.find(
- {
- 'tokens.readAndWrite': { $exists: true },
- 'tokens.readAndWritePrefix': { $exists: false }
- },
- { tokens: 1 },
- (err, projects) => {
- if (err) {
- return callback(err)
- }
- console.log(`>> Updating ${projects.length} projects`)
- Async.eachLimit(
- projects,
- 5,
- (project, cb) => {
- const rwToken = project.tokens.readAndWrite
- const prefixMatch = rwToken.match(/^(\d+).*$/)
- if (!prefixMatch) {
- const err = new Error(
- `no prefix on token: ${project._id}, ${rwToken}`
- )
- console.log(`>> Error, ${err.message}`)
- return cb(err)
- }
- db.projects.update(
- { _id: project._id },
- { $set: { 'tokens.readAndWritePrefix': prefixMatch[1] } },
- cb
- )
- },
- err => {
- if (err) {
- return callback(err)
- }
- console.log('>> done')
- callback()
- }
- )
- }
- )
-}
-
-const erasePrefix = (db, callback) => {
- db.projects.update({$unset: 'tokens.readAndWritePrefix'}, callback)
-}
-
-
-// Migrations
-
-exports.migrate = (client, done) => {
- console.log(`>> Adding index to projects: ${JSON.stringify(indexKeys)}, with options: ${JSON.stringify(indexOpts)}`)
- addReadAndWritePrefixIndex(db, (err) => {
- if(err) {
- console.log(">> Error while adding index")
- return done(err)
- }
- console.log(">> Extracting tokens.readAndWritePrefix field for existing projects")
- extractPrefix(db, (err) => {
- if(err) {
- console.log(">> Error while extracting prefix data")
- return done(err)
- }
- done()
- })
- })
-}
-
-exports.rollback = (client, done) => {
- console.log(`>> Dropping index on projects: ${JSON.stringify(indexKeys)}`)
- removeReadAndWritePrefixIndex(db, (err) => {
- if(err) {
- console.log(">> Error while dropping index")
- return done(err)
- }
- console.log(">> Erasing tokens.readAndWritePrefix field for existing projects")
- erasePrefix(db, (err) => {
- if(err) {
- console.log(">> Error while erasing prefix data")
- return done(err)
- }
- done()
- })
- })
-}
diff --git a/migrations/11_set_project_image_name.js b/migrations/11_set_project_image_name.js
deleted file mode 100644
index 17c06fd6fc..0000000000
--- a/migrations/11_set_project_image_name.js
+++ /dev/null
@@ -1,23 +0,0 @@
-const Settings = require('settings-sharelatex')
-const mongojs = require('mongojs')
-const db = mongojs(Settings.mongo.url, ['projects'])
-
-exports.migrate = (client, done) => {
- console.log(`>> Setting 'imageName' in projects`)
-
- if (!Settings.currentImageName) {
- console.log(`>> 'currentImageName' is not defined, no projects updated`)
- return done()
- }
-
- console.log(`>> Setting 'imageName' = ${Settings.currentImageName}`)
-
- db.projects.update(
- { imageName: { $exists: false } },
- { $set: { imageName: Settings.currentImageName } },
- { multi: true },
- done
- )
-}
-
-exports.rollback = (client, done) => done()
diff --git a/migrations/1_move_doc_lines_to_doc_collection.coffee b/migrations/1_move_doc_lines_to_doc_collection.coffee
deleted file mode 100644
index e4433de7bd..0000000000
--- a/migrations/1_move_doc_lines_to_doc_collection.coffee
+++ /dev/null
@@ -1,164 +0,0 @@
-Settings = require "settings-sharelatex"
-bson = require('bson')
-BSON = new bson()
-fs = require("fs")
-mongojs = require("mongojs")
-ObjectId = mongojs.ObjectId
-console.log Settings.mongo.url
-db = mongojs(Settings.mongo.url, ['projects', 'docs'])
-_ = require("lodash")
-async = require("async")
-exec = require("child_process").exec
-
-finished_projects_path = "/tmp/finished-projects"
-all_projects_path = "/tmp/all-projects"
-project_too_large_path = "/tmp/large_projects"
-
-
-printProgress = ->
- exec "wc #{finished_projects_path}", (error, results) ->
- setTimeout printProgress, 1000 * 30
-
-checkIfFileHasBeenProccessed = (project_id, callback)->
- exec "grep #{project_id} #{finished_projects_path}", (error, results) ->
- hasBeenProcessed = _.include(results, project_id)
- callback(error, hasBeenProcessed)
-
-loadProjectIds = (callback)->
- console.log "loading project ids from #{all_projects_path}"
- fs.readFile all_projects_path, "utf-8", (err, data)->
- ids = data.split("\n")
- ids = _.filter ids, (id)-> id? and id.length == 24
- console.log "loaded #{ids.length} project ids from #{all_projects_path}"
- callback err, ids
-
-getAndWriteProjectids = (callback)->
- console.log "finding all project id's - #{new Date().toString()}"
- db.projects.find {}, {_id:1}, (err, ids)->
- console.log "total found projects in mongo #{ids.length} - #{new Date().toString()}"
- ids = _.pluck ids, '_id'
- ids = _.filter ids, (id)-> id?
- fileData = ids.join("\n")
- fs.writeFile all_projects_path, fileData, ->
- callback(err, ids)
-
-markProjectAsToLargeAndFinished = (project_id, callback)->
- console.log "#{project_id} too large"
- markProjectAsProcessed project_id, (err)->
- fs.appendFile project_too_large_path, "#{project_id}\n", callback
-
-getProjectIds = (callback)->
- exists = fs.existsSync all_projects_path
- if exists
- loadProjectIds callback
- else
- getAndWriteProjectids callback
-
-markProjectAsProcessed = (project_id, callback)->
- fs.appendFile finished_projects_path, "#{project_id}\n", callback
-
-getAllDocs = (project_id, callback = (error, docs) ->) ->
- db.projects.findOne _id:ObjectId(project_id), (error, project) ->
- return callback(error) if error?
- if !project?
- console.log "no such project #{project_id}"
- return callback()
- size = BSON.calculateObjectSize(project)
- if size > 12000000 #12mb
- return markProjectAsToLargeAndFinished project_id, callback
- findAllDocsInProject project, (error, docs) ->
- return callback(error) if error?
- return callback null, docs
-
-findAllDocsInProject = (project, callback = (error, docs) ->) ->
- callback null, _findAllDocsInFolder project.rootFolder[0]
-
-_findDocInFolder = (folder = {}, doc_id, currentPath) ->
- for doc, i in folder.docs or []
- if doc?._id? and doc._id.toString() == doc_id.toString()
- return {
- doc: doc
- mongoPath: "#{currentPath}.docs.#{i}"
- }
-
- for childFolder, i in folder.folders or []
- result = _findDocInFolder childFolder, doc_id, "#{currentPath}.folders.#{i}"
- return result if result?
-
- return null
-
-_findAllDocsInFolder = (folder = {}) ->
- docs = folder.docs or []
- for childFolder in folder.folders or []
- docs = docs.concat _findAllDocsInFolder childFolder
- return docs
-
-insertDocIntoDocCollection = (project_id, doc_id, lines, oldRev, callback)->
- if !project_id?
- return callback("no project id")
- if !doc_id?
- return callback()
- if !lines?
- lines = [""]
- update = {}
- update["_id"] = ObjectId(doc_id.toString())
- update["lines"] = lines
- update["project_id"] = ObjectId(project_id)
- update["rev"] = oldRev || 0
- db.docs.insert update, callback
-
-saveDocsIntoMongo = (project_id, docs, callback)->
- jobs = _.map docs, (doc)->
- (cb)->
- if !doc?
- console.error "null doc in project #{project_id}" #just skip it, not a big deal
- return cb()
- insertDocIntoDocCollection project_id, doc._id, doc.lines, doc.rev, (err)->
- if err?.code == 11000 #duplicate key, doc already in there so its not a problem.
- err = undefined
- if err?
- console.log "error inserting doc into doc collection", err
- cb(err)
-
-
- async.series jobs, callback
-
-
-processNext = (project_id, callback)->
- checkIfFileHasBeenProccessed project_id, (err, hasBeenProcessed)->
- if hasBeenProcessed
- console.log "#{project_id} already procssed, skipping"
- return callback()
- console.log "#{project_id} processing"
- getAllDocs project_id, (err, docs)->
- if err?
- console.error err, project_id, "could not get all docs"
- return callback(err)
- else
- saveDocsIntoMongo project_id, docs, (err)->
- if err?
- console.error err, project_id, "could not save docs into mongo"
- return callback(err)
- markProjectAsProcessed project_id, (err)->
- setTimeout(
- -> callback(err)
- ,0)
-
-
-
-exports.migrate = (client, done = ->)->
- getProjectIds (err, ids)->
- printProgress()
- jobs = _.map ids, (id)->
- return (cb)->
- processNext(id, cb)
- async.series jobs, (err)->
- if err?
- console.error err, "at end of jobs"
- else
- console.log "finished"
- done(err)
-
-
-exports.rollback = (next)->
- next()
diff --git a/migrations/2_doc_lines_delete_from_project.coffee b/migrations/2_doc_lines_delete_from_project.coffee
deleted file mode 100644
index 2d5222f63d..0000000000
--- a/migrations/2_doc_lines_delete_from_project.coffee
+++ /dev/null
@@ -1,185 +0,0 @@
-Settings = require "settings-sharelatex"
-fs = require("fs")
-mongojs = require("mongojs")
-ObjectId = mongojs.ObjectId
-db = mongojs(Settings.mongo.url, ['projects', 'docs'])
-_ = require("lodash")
-async = require("async")
-exec = require("child_process").exec
-
-finished_projects_path = "/tmp/finished-projects-2"
-all_projects_path = "/tmp/all-projects-2"
-unmigrated_docs_path = "/tmp/unmigrated-2"
-
-
-printProgress = ->
- exec "wc #{finished_projects_path}", (error, results) ->
- setTimeout printProgress, 1000 * 30
-
-checkIfFileHasBeenProccessed = (project_id, callback)->
- exec "grep #{project_id} #{finished_projects_path}", (error, results) ->
- hasBeenProcessed = _.include(results, project_id)
- callback(error, hasBeenProcessed)
-
-loadProjectIds = (callback)->
- console.log "loading project ids from #{all_projects_path}"
- fs.readFile all_projects_path, "utf-8", (err, data)->
- ids = data.split("\n")
- ids = _.filter ids, (id)-> id? and id.length == 24
- console.log "loaded #{ids.length} project ids from #{all_projects_path}"
- callback err, ids
-
-getAndWriteProjectids = (callback)->
- console.log "finding all project id's - #{new Date().toString()}"
- db.projects.find {}, {_id:1}, (err, ids)->
- console.log "total found projects in mongo #{ids.length} - #{new Date().toString()}"
- ids = _.pluck ids, '_id'
- ids = _.filter ids, (id)-> id?
- fileData = ids.join("\n")
- fs.writeFile all_projects_path, fileData, ->
- callback(err, ids)
-
-markDocAsUnmigrated = (project_id, doc_id, callback)->
- console.log "#{project_id} #{doc_id} unmigrated"
- markProjectAsProcessed project_id, (err)->
- fs.appendFile unmigrated_docs_path, "#{project_id} #{doc_id}\n", callback
-
-markUnmigratedDocs = (project_id, docs, callback)->
- console.log docs.length, project_id, "unmigrated"
- jobs = _.map docs, (doc)->
- (cb)->
- markDocAsUnmigrated project_id, doc._id, cb
- async.series jobs, callback
-
-getProjectIds = (callback)->
- exists = fs.existsSync all_projects_path
- if exists
- loadProjectIds callback
- else
- getAndWriteProjectids callback
-
-markProjectAsProcessed = (project_id, callback)->
- fs.appendFile finished_projects_path, "#{project_id}\n", callback
-
-getAllDocs = (project_id, callback = (error, docs) ->) ->
- excludes = {}
- for i in [0..12]
- excludes["rootFolder#{Array(i).join(".folders")}.docs.lines"] = 0
- db.projects.findOne _id: ObjectId(project_id.toString()), excludes, (error, project) ->
- return callback(error) if error?
- if !project?
- console.log "no such project #{project_id}"
- return callback()
- findAllDocsInProject project, (error, docs) ->
- return callback(error) if error?
- return callback null, docs, project
-
-findAllDocsInProject = (project, callback = (error, docs) ->) ->
- callback null, _findAllDocsInFolder project.rootFolder[0]
-
-findDocInProject = (project, doc_id, callback = (error, doc, mongoPath) ->) ->
- result = _findDocInFolder project.rootFolder[0], doc_id, "rootFolder.0"
- if result?
- callback null, result.doc, result.mongoPath
- else
- callback null, null, null
-
-_findDocInFolder = (folder = {}, doc_id, currentPath) ->
- for doc, i in folder.docs or []
- if doc?._id? and doc._id.toString() == doc_id.toString()
- return {
- doc: doc
- mongoPath: "#{currentPath}.docs.#{i}"
- }
- for childFolder, i in folder.folders or []
- result = _findDocInFolder childFolder, doc_id, "#{currentPath}.folders.#{i}"
- return result if result?
-
- return null
-
-_findAllDocsInFolder = (folder = {}) ->
- docs = folder.docs or []
- for childFolder in folder.folders or []
- docs = docs.concat _findAllDocsInFolder childFolder
- return docs
-
-isDocInDocCollection = (doc, callback)->
- if !doc?._id? or doc._id.length == 0
- return callback(null, true)
- db.docs.find({_id: ObjectId(doc._id+"")}, {_id: 1}).limit 1, (err, foundDocs)->
- exists = foundDocs.length > 0
- callback err, exists
-
-getWhichDocsCanBeDeleted = (docs, callback = (err, docsToBeDeleted, unmigratedDocs)->)->
- docsToBeDeleted = []
- unmigratedDocs = []
-
- jobs = _.map docs, (doc)->
- return (cb)->
- isDocInDocCollection doc, (err, exists)->
- if exists
- docsToBeDeleted.push doc
- else
- unmigratedDocs.push doc
- cb(err)
- async.series jobs, (err)->
- callback err, docsToBeDeleted, unmigratedDocs
-
-whipeDocLines = (project_id, mongoPath, callback)->
- update =
- $unset: {}
- update.$unset["#{mongoPath}.lines"] = ""
- update.$unset["#{mongoPath}.rev"] = ""
- db.projects.update _id: ObjectId(project_id+''), update, callback
-
-
-removeDocLinesFromProject = (docs, project, callback)->
- jobs = _.map docs, (doc)->
- (cb)->
- findDocInProject project, doc._id, (err, doc, mongoPath)->
- whipeDocLines project._id, mongoPath, cb
- async.parallelLimit jobs, 5, callback
-
-processNext = (project_id, callback)->
- if !project_id? or project_id.length == 0
- return callback()
- checkIfFileHasBeenProccessed project_id, (err, hasBeenProcessed)->
- if hasBeenProcessed
- console.log "#{project_id} already procssed, skipping"
- return callback()
- console.log "#{project_id} processing"
- getAllDocs project_id, (err, docs, project)->
- if err?
- console.error err, project_id, "could not get all docs"
- return callback(err)
- else
- getWhichDocsCanBeDeleted docs, (err, docsToBeDeleted, unmigratedDocs)->
- if err?
- console.error err, project_id, "could not save docs into mongo"
- return callback(err)
- markUnmigratedDocs project_id, unmigratedDocs, (err)->
- removeDocLinesFromProject docsToBeDeleted, project, (err)->
- if err?
- return callback(err)
- markProjectAsProcessed project_id, (err)->
- setTimeout(
- -> callback(err)
- ,0)
-
-exports.migrate = (client, done = ->)->
- getProjectIds (err, ids)->
- printProgress()
- jobs = _.map ids, (id)->
- return (cb)->
- processNext(id, cb)
- async.series jobs, (err)->
- if err?
- console.error err, "at end of jobs"
- else
- console.log "finished"
- done(err)
-
-
-exports.rollback = (next)->
- next()
-
diff --git a/migrations/3_pack_docHistory_collection.coffee b/migrations/3_pack_docHistory_collection.coffee
deleted file mode 100644
index c652dd7816..0000000000
--- a/migrations/3_pack_docHistory_collection.coffee
+++ /dev/null
@@ -1,342 +0,0 @@
-Settings = require "settings-sharelatex"
-fs = require("fs")
-mongojs = require("mongojs")
-ObjectId = mongojs.ObjectId
-db = mongojs(Settings.mongo.url, ['docs','docHistory', 'docHistoryStats'])
-_ = require("underscore")
-async = require("async")
-exec = require("child_process").exec
-bson = require('bson')
-BSON = new bson()
-
-logger = {
- log: ->
- err: ->
-}
-
-needToExit = false
-handleExit = () ->
- needToExit = true
- console.log('Got signal. Shutting down.')
-
-process.on 'SIGINT', handleExit
-process.on 'SIGHUP', handleExit
-
-finished_docs_path = "/tmp/finished-docs-3"
-all_docs_path = "/tmp/all-docs-3"
-unmigrated_docs_path = "/tmp/unmigrated-docs-3"
-
-finished_docs = {}
-if fs.existsSync(finished_docs_path)
- for id in fs.readFileSync(finished_docs_path,'utf-8').split("\n")
- finished_docs[id] = true
-
-getAndWriteDocids = (callback)->
- console.log "finding all doc id's - #{new Date().toString()}"
- db.docs.find {}, {_id:1}, (err, ids)->
- console.log "total found docs in mongo #{ids.length} - #{new Date().toString()}"
- ids = _.pluck ids, '_id'
- ids = _.filter ids, (id)-> id?
- fileData = ids.join("\n")
- fs.writeFileSync all_docs_path + ".tmp", fileData
- fs.renameSync all_docs_path + ".tmp", all_docs_path
- callback(err, ids)
-
-loadDocIds = (callback)->
- console.log "loading doc ids from #{all_docs_path}"
- data = fs.readFileSync all_docs_path, "utf-8"
- ids = data.split("\n")
- console.log "loaded #{ids.length} doc ids from #{all_docs_path}"
- callback null, ids
-
-getDocIds = (callback)->
- exists = fs.existsSync all_docs_path
- if exists
- loadDocIds callback
- else
- getAndWriteDocids callback
-
-markDocAsProcessed = (doc_id, callback)->
- finished_docs[doc_id] = true
- fs.appendFile finished_docs_path, "#{doc_id}\n", callback
-
-markDocAsUnmigrated = (doc_id, callback)->
- console.log "#{doc_id} unmigrated"
- markDocAsProcessed doc_id, (err)->
- fs.appendFile unmigrated_docs_path, "#{doc_id}\n", callback
-
-checkIfDocHasBeenProccessed = (doc_id, callback)->
- callback(null, finished_docs[doc_id])
-
-processNext = (doc_id, callback)->
- if !doc_id? or doc_id.length == 0
- return callback()
- if needToExit
- return callback(new Error("graceful shutdown"))
- checkIfDocHasBeenProccessed doc_id, (err, hasBeenProcessed)->
- if hasBeenProcessed
- console.log "#{doc_id} already processed, skipping"
- return callback()
- PackManager._packDocHistory doc_id, {}, (err) ->
- if err?
- console.log "error processing #{doc_id}"
- markDocAsUnmigrated doc_id, callback
- else
- markDocAsProcessed doc_id, callback
-
-updateIndexes = (callback) ->
- async.series [
- (cb) ->
- console.log "create index"
- db.docHistory.ensureIndex { project_id: 1, "meta.end_ts": 1, "meta.start_ts": -1 }, { background: true }, cb
- (cb) ->
- console.log "drop index"
- db.docHistory.dropIndex { project_id: 1, "meta.end_ts": 1 }, cb
- (cb) ->
- console.log "drop index"
- db.docHistory.dropIndex { project_id: 1, "pack.0.meta.end_ts": 1, "meta.end_ts": 1}, cb
- ], (err, results) ->
- console.log "all done"
- callback(err)
-
-exports.migrate = (client, done = ->)->
- getDocIds (err, ids)->
- totalDocCount = ids.length
- alreadyFinishedCount = Object.keys(finished_docs).length
- t0 = Date.now()
- printProgress = () ->
- count = Object.keys(finished_docs).length
- processedFraction = (count-alreadyFinishedCount)/totalDocCount
- remainingFraction = (totalDocCount-count)/totalDocCount
- t = Date.now()
- dt = (t-t0)*remainingFraction/processedFraction
- estFinishTime = new Date(t + dt)
- console.log "completed #{count}/#{totalDocCount} processed=#{processedFraction.toFixed(2)} remaining=#{remainingFraction.toFixed(2)} elapsed=#{(t-t0)/1000} est Finish=#{estFinishTime}"
- interval = setInterval printProgress, 3*1000
-
- nextId = null
-
- testFn = () ->
- return false if needToExit
- id = ids.shift()
- while id? and finished_docs[id] # skip finished
- id = ids.shift()
- nextId = id
- return nextId?
-
- executeFn = (cb) ->
- processNext nextId, cb
-
- async.whilst testFn, executeFn, (err)->
- if err?
- console.error err, "at end of jobs"
- else
- console.log "finished at #{new Date}"
- clearInterval interval
- done(err)
-
-exports.rollback = (client, done)->
- done()
-
-# process.nextTick () ->
-# exports.migrate () ->
-# console.log "done"
-
-DAYS = 24 * 3600 * 1000 # one day in milliseconds
-
-# copied from track-changes/app/coffee/PackManager.coffee
-
-PackManager =
- MAX_SIZE: 1024*1024 # make these configurable parameters
- MAX_COUNT: 512
-
- convertDocsToPacks: (docs, callback) ->
- packs = []
- top = null
- docs.forEach (d,i) ->
- # skip existing packs
- if d.pack?
- top = null
- return
- sz = BSON.calculateObjectSize(d)
- # decide if this doc can be added to the current pack
- validLength = top? && (top.pack.length < PackManager.MAX_COUNT)
- validSize = top? && (top.sz + sz < PackManager.MAX_SIZE)
- bothPermanent = top? && (top.expiresAt? is false) && (d.expiresAt? is false)
- bothTemporary = top? && (top.expiresAt? is true) && (d.expiresAt? is true)
- within1Day = bothTemporary && (d.meta.start_ts - top.meta.start_ts < 24 * 3600 * 1000)
- if top? && validLength && validSize && (bothPermanent || (bothTemporary && within1Day))
- top.pack = top.pack.concat {v: d.v, meta: d.meta, op: d.op, _id: d._id}
- top.sz += sz
- top.n += 1
- top.v_end = d.v
- top.meta.end_ts = d.meta.end_ts
- top.expiresAt = d.expiresAt if top.expiresAt?
- return
- else
- # create a new pack
- top = _.clone(d)
- top.pack = [ {v: d.v, meta: d.meta, op: d.op, _id: d._id} ]
- top.meta = { start_ts: d.meta.start_ts, end_ts: d.meta.end_ts }
- top.sz = sz
- top.n = 1
- top.v_end = d.v
- delete top.op
- delete top._id
- packs.push top
-
- callback(null, packs)
-
- checkHistory: (docs, callback) ->
- errors = []
- prev = null
- error = (args...) ->
- errors.push args
- docs.forEach (d,i) ->
- if d.pack?
- n = d.pack.length
- last = d.pack[n-1]
- error('bad pack v_end', d) if d.v_end != last.v
- error('bad pack start_ts', d) if d.meta.start_ts != d.pack[0].meta.start_ts
- error('bad pack end_ts', d) if d.meta.end_ts != last.meta.end_ts
- d.pack.forEach (p, i) ->
- prev = v
- v = p.v
- error('bad version', v, 'in', p) if v <= prev
- #error('expired op', p, 'in pack') if p.expiresAt?
- else
- prev = v
- v = d.v
- error('bad version', v, 'in', d) if v <= prev
- if errors.length
- callback(errors)
- else
- callback()
-
- insertPack: (packObj, callback) ->
- bulk = db.docHistory.initializeOrderedBulkOp()
- doc_id = packObj.doc_id
- expect_nInserted = 1
- expect_nRemoved = packObj.pack.length
- logger.log {doc_id: doc_id}, "adding pack, removing #{expect_nRemoved} ops"
- bulk.insert packObj
- ids = (op._id for op in packObj.pack)
- bulk.find({_id:{$in:ids}}).remove()
- bulk.execute (err, result) ->
- if err?
- logger.error {doc_id: doc_id}, "error adding pack"
- callback(err, result)
- else if result.nInserted != expect_nInserted or result.nRemoved != expect_nRemoved
- logger.error {doc_id: doc_id, result}, "unexpected result adding pack"
- callback(new Error("unexpected result adding pack: inserted #{result.nInserted}/#{expect_nInserted}, removed #{result.nRemoved}/#{expect_nRemoved}"), result)
- else
- db.docHistoryStats.update {doc_id:doc_id}, {
- $inc:{update_count:-expect_nRemoved},
- $currentDate:{last_packed:true}
- }, {upsert:true}, () ->
- callback(err, result)
-
- # retrieve document ops/packs and check them
- getDocHistory: (doc_id, callback) ->
- db.docHistory.find({doc_id:ObjectId(doc_id)}).sort {v:1}, (err, docs) ->
- return callback(err) if err?
- # for safety, do a consistency check of the history
- logger.log {doc_id}, "checking history for document"
- PackManager.checkHistory docs, (err) ->
- return callback(err) if err?
- callback(err, docs)
- #PackManager.deleteExpiredPackOps docs, (err) ->
- # return callback(err) if err?
- # callback err, docs
-
- packDocHistory: (doc_id, options, callback) ->
- if typeof callback == "undefined" and typeof options == 'function'
- callback = options
- options = {}
- LockManager.runWithLock(
- "HistoryLock:#{doc_id}",
- (releaseLock) ->
- PackManager._packDocHistory(doc_id, options, releaseLock)
- , callback
- )
-
- _packDocHistory: (doc_id, options, callback) ->
- logger.log {doc_id},"starting pack operation for document history"
-
- PackManager.getDocHistory doc_id, (err, docs) ->
- return callback(err) if err?
- origDocs = 0
- origPacks = 0
- for d in docs
- if d.pack? then origPacks++ else origDocs++
- PackManager.convertDocsToPacks docs, (err, packs) ->
- return callback(err) if err?
- total = 0
- for p in packs
- total = total + p.pack.length
- logger.log {doc_id, origDocs, origPacks, newPacks: packs.length, totalOps: total}, "document stats"
- if packs.length
- if options['dry-run']
- logger.log {doc_id}, 'dry-run, skipping write packs'
- return callback()
- PackManager.savePacks packs, (err) ->
- return callback(err) if err?
- # check the history again
- PackManager.getDocHistory doc_id, callback
- else
- logger.log {doc_id}, "no packs to write"
- # keep a record that we checked this one to avoid rechecking it
- db.docHistoryStats.update {doc_id:doc_id}, {
- $currentDate:{last_checked:true}
- }, {upsert:true}, () ->
- callback null, null
-
- DB_WRITE_DELAY: 100
-
- savePacks: (packs, callback) ->
- async.eachSeries packs, PackManager.safeInsert, (err, result) ->
- if err?
- logger.error {err, result}, "error writing packs"
- callback err, result
- else
- callback()
-
- safeInsert: (packObj, callback) ->
- PackManager.insertPack packObj, (err, result) ->
- setTimeout () ->
- callback(err,result)
- , PackManager.DB_WRITE_DELAY
-
- deleteExpiredPackOps: (docs, callback) ->
- now = Date.now()
- toRemove = []
- toUpdate = []
- docs.forEach (d,i) ->
- if d.pack?
- newPack = d.pack.filter (op) ->
- if op.expiresAt? then op.expiresAt > now else true
- if newPack.length == 0
- toRemove.push d
- else if newPack.length < d.pack.length
- # adjust the pack properties
- d.pack = newPack
- first = d.pack[0]
- last = d.pack[d.pack.length - 1]
- d.v_end = last.v
- d.meta.start_ts = first.meta.start_ts
- d.meta.end_ts = last.meta.end_ts
- toUpdate.push d
- if toRemove.length or toUpdate.length
- bulk = db.docHistory.initializeOrderedBulkOp()
- toRemove.forEach (pack) ->
- console.log "would remove", pack
- #bulk.find({_id:pack._id}).removeOne()
- toUpdate.forEach (pack) ->
- console.log "would update", pack
- #bulk.find({_id:pack._id}).updateOne(pack);
- bulk.execute callback
- else
- callback()
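Note: the grouping rule in `convertDocsToPacks` above is the core of this migration. Below is a minimal JavaScript sketch of the same rule, not the migration itself; the `ops` input and the caller-supplied `sizeOf` function are hypothetical stand-ins for the updates and `BSON.calculateObjectSize` used above.

```js
// Sketch of the pack-grouping rule (hypothetical inputs, not the migration).
// An op joins the current pack only if the pack stays under both limits and
// both sides agree on permanence; temporary ops must also start within a day.
const MAX_SIZE = 1024 * 1024 // max bytes per pack
const MAX_COUNT = 512 // max ops per pack
const ONE_DAY = 24 * 3600 * 1000 // one day in milliseconds

function convertOpsToPacks(ops, sizeOf) {
  const packs = []
  let top = null
  for (const op of ops) {
    const sz = sizeOf(op)
    const fits =
      top !== null && top.pack.length < MAX_COUNT && top.sz + sz < MAX_SIZE
    const bothPermanent = top !== null && !top.expiresAt && !op.expiresAt
    const within1Day =
      top !== null &&
      !!top.expiresAt &&
      !!op.expiresAt &&
      op.meta.start_ts - top.meta.start_ts < ONE_DAY
    if (fits && (bothPermanent || within1Day)) {
      // extend the current pack
      top.pack.push(op)
      top.sz += sz
      top.meta.end_ts = op.meta.end_ts
    } else {
      // start a new pack
      top = {
        pack: [op],
        sz,
        expiresAt: op.expiresAt,
        meta: { start_ts: op.meta.start_ts, end_ts: op.meta.end_ts },
      }
      packs.push(top)
    }
  }
  return packs
}
```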
diff --git a/migrations/4_update_user_features.coffee b/migrations/4_update_user_features.coffee
deleted file mode 100644
index 680df3c459..0000000000
--- a/migrations/4_update_user_features.coffee
+++ /dev/null
@@ -1,38 +0,0 @@
-Settings = require "settings-sharelatex"
-fs = require("fs")
-mongojs = require("mongojs")
-ObjectId = mongojs.ObjectId
-db = mongojs(Settings.mongo.url, ['users'])
-_ = require("underscore")
-
-
-handleExit = () ->
- console.log('Got signal. Shutting down.')
-
-
-process.on 'SIGINT', handleExit
-process.on 'SIGHUP', handleExit
-
-
-exports.migrate = (client, done=()->) ->
- patch = {
- $set: {
- features: {
- collaborators: -1
- dropbox: true
- versioning: true
- references: true
- templates: true
- compileTimeout: 180
- compileGroup: "standard"
- }
- }
- }
- console.log ">> updating all user features: ", patch
- db.users.update {}, patch, {multi: true}, (err) ->
- console.log "finished updating all user features"
- return done(err)
-
-
-exports.rollback = (client, done) ->
- done()
diff --git a/migrations/5_remove_holding_accounts.coffee b/migrations/5_remove_holding_accounts.coffee
deleted file mode 100644
index ee796b3023..0000000000
--- a/migrations/5_remove_holding_accounts.coffee
+++ /dev/null
@@ -1,103 +0,0 @@
-Settings = require "settings-sharelatex"
-mongojs = require("mongojs")
-ObjectId = mongojs.ObjectId
-db = mongojs(Settings.mongo.url, ['users', 'projects', 'subscriptions'])
-async = require "async"
-
-module.exports = HoldingAccountMigration =
- DRY_RUN: true
-
- findHoldingAccounts: (callback = (error, users) ->) ->
- db.users.find({holdingAccount: true, hashedPassword: { $exists: false }}, {holdingAccount: 1, email: 1}, callback)
-
- deleteUserProjects: (user_id, callback = (error) ->) ->
- # Holding accounts can't own projects, so only remove from
- # collaberator_refs and readOnly_refs
- console.log "[Removing user from projects]", user_id
- db.projects.find {
- $or: [
- {collaberator_refs: user_id},
- {readOnly_refs: user_id}
- ]
- }, { collaberator_refs: 1, readOnly_refs: 1 }, (error, projects = []) ->
- return callback(error) if error?
- jobs = projects.map (project) ->
- (cb) ->
- console.log "[Removing user from project]", user_id, JSON.stringify(project)
- if !project?._id?
- throw new Error("no project id")
-
- if !HoldingAccountMigration.DRY_RUN
- db.projects.update {
- _id: project._id
- }, {
- $pull: {
- collaberator_refs: user_id,
- readOnly_refs: user_id
- }
- }, (error, result) ->
- return cb(error) if error?
- console.log "[Removed user from project]", user_id, project._id, result
- cb()
- else
- console.log "[Would have removed user from project]", user_id, project._id
- cb()
-
- async.series jobs, callback
-
- deleteUser: (user_id, callback = (error) ->) ->
- if !user_id?
- throw new Error("must have user_id")
- if !HoldingAccountMigration.DRY_RUN
- db.users.remove {_id: user_id, holdingAccount: true}, (error, result) ->
- return callback(error) if error?
- console.log "[Removed user]", user_id, result
- if result.n != 1
- return callback(new Error("failed to remove user as expected"))
- callback()
- else
- console.log "[Would have removed user]", user_id
- callback()
-
- migrateGroupInvites: (user_id, email, callback = (error) ->) ->
- if !user_id?
- throw new Error("must have user_id")
- if !HoldingAccountMigration.DRY_RUN
- db.subscriptions.update {member_ids: user_id}, {
- $pull: { member_ids: user_id },
- $addToSet : { invited_emails: email }
- }, { multi : true }, (error, result) ->
- return callback(error) if error?
- console.log "[Migrated user in group accounts]", user_id, email, result
- callback()
- else
- console.log "[Would have migrated user in group accounts]", user_id, email
- callback()
-
- run: (done = () ->) ->
- console.log "[Getting list of holding accounts]"
- HoldingAccountMigration.findHoldingAccounts (error, users) ->
- throw error if error?
- console.log "[Got #{users.length} holding accounts]"
- i = 0
- jobs = users.map (u) ->
- (cb) ->
- console.log "[Removing user #{i++}/#{users.length}]"
- HoldingAccountMigration.migrateGroupInvites u._id, u.email, (error) ->
- return cb(error) if error?
- HoldingAccountMigration.deleteUser u._id, (error) ->
- return cb(error) if error?
- HoldingAccountMigration.deleteUserProjects u._id, (error) ->
- return cb(error) if error?
- setTimeout cb, 50 # Small delay to not hammer DB
- async.series jobs, (error) ->
- throw error if error?
- console.log "[FINISHED]"
- done()
-
- migrate: (client, done=()->) ->
- HoldingAccountMigration.DRY_RUN = false
- HoldingAccountMigration.run(done)
-
- rollback: (client, done) ->
- done()
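The `DRY_RUN` flag above gates every destructive write, and only `migrate()` flips it off. A minimal JavaScript sketch of that guard pattern, with hypothetical names:

```js
// Sketch of the dry-run guard used by the migration above (names hypothetical).
const Migration = {
  DRY_RUN: true, // default to read-only; only migrate() turns this off

  removeUser(db, userId, callback) {
    if (Migration.DRY_RUN) {
      console.log('[Would have removed user]', userId)
      return callback()
    }
    db.users.remove({ _id: userId }, callback)
  },

  migrate(db, done) {
    Migration.DRY_RUN = false // the real run must opt in explicitly
    Migration.removeUser(db, 'some-user-id', done)
  },
}
```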
diff --git a/migrations/6_add_track_changes_feature.coffee b/migrations/6_add_track_changes_feature.coffee
deleted file mode 100644
index e807102faa..0000000000
--- a/migrations/6_add_track_changes_feature.coffee
+++ /dev/null
@@ -1,30 +0,0 @@
-Settings = require "settings-sharelatex"
-fs = require("fs")
-mongojs = require("mongojs")
-ObjectId = mongojs.ObjectId
-db = mongojs(Settings.mongo.url, ['users'])
-_ = require("underscore")
-
-
-handleExit = () ->
- console.log('Got signal. Shutting down.')
-
-
-process.on 'SIGINT', handleExit
-process.on 'SIGHUP', handleExit
-
-
-exports.migrate = (client, done=()->) ->
- patch = {
- $set: {
- 'features.trackChanges': true
- }
- }
- console.log ">> enabling trackChanges feature: ", patch
- db.users.update {}, patch, {multi: true}, (err) ->
- console.log "finished enabling trackChanges feature"
- return done(err)
-
-
-exports.rollback = (client, done) ->
- done()
diff --git a/migrations/7_add_token_indexes.coffee b/migrations/7_add_token_indexes.coffee
deleted file mode 100644
index 867c209753..0000000000
--- a/migrations/7_add_token_indexes.coffee
+++ /dev/null
@@ -1,51 +0,0 @@
-Settings = require "settings-sharelatex"
-mongojs = require("mongojs")
-db = mongojs(Settings.mongo.url, ['projects'])
-
-
-handleExit = () ->
- console.log('Got signal. Shutting down.')
-
-
-exports.migrate = (client, done=()->) ->
- console.log ">> Adding indexes for token-based project access: "
- db.projects.ensureIndex {'tokens.readAndWrite': 1}, {
- partialFilterExpression: { 'tokens.readAndWrite': { $exists: true } },
- unique: true,
- background: true
- }, (err) ->
- if err?
- return done(err)
- db.projects.ensureIndex {'tokens.readOnly': 1}, {
- partialFilterExpression: { 'tokens.readOnly': { $exists: true } },
- unique: true,
- background: true
- }, (err) ->
- if err?
- return done(err)
- db.projects.ensureIndex {tokenAccessReadAndWrite_refs: 1}, {
- background: true
- }, (err) ->
- if err?
- return done(err)
- db.projects.ensureIndex {tokenAccessOnly_refs: 1}, {
- background: true
- }, (err) ->
- console.log ">> done adding indexes for token-based project access"
- done(err)
-
-
-exports.rollback = (client, done) ->
- done()
-
-
-process.on 'SIGINT', handleExit
-process.on 'SIGHUP', handleExit
-
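A note on the `partialFilterExpression` options above: with a plain unique index, every project lacking a token would index the same missing value, so the unique constraint would reject all but the first such project. The partial filter restricts uniqueness to documents that actually have the field. A sketch of the equivalent call using the modern `createIndex` name (same collection and options as above):

```js
// Unique only among documents that actually have the field.
db.projects.createIndex(
  { 'tokens.readAndWrite': 1 },
  {
    unique: true,
    partialFilterExpression: { 'tokens.readAndWrite': { $exists: true } },
    background: true,
  }
)
// Projects without tokens coexist; duplicate token values are rejected.
```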
diff --git a/migrations/7_add_track_changes_feature_again.coffee b/migrations/7_add_track_changes_feature_again.coffee
deleted file mode 100644
index b942e61f3f..0000000000
--- a/migrations/7_add_track_changes_feature_again.coffee
+++ /dev/null
@@ -1,31 +0,0 @@
-# This migration is needed because track changes was left out of the default settings.
-Settings = require "settings-sharelatex"
-fs = require("fs")
-mongojs = require("mongojs")
-ObjectId = mongojs.ObjectId
-db = mongojs(Settings.mongo.url, ['users'])
-_ = require("underscore")
-
-
-handleExit = () ->
- console.log('Got signal. Shutting down.')
-
-
-process.on 'SIGINT', handleExit
-process.on 'SIGHUP', handleExit
-
-
-exports.migrate = (client, done=()->) ->
- patch = {
- $set: {
- 'features.trackChanges': true
- }
- }
- console.log ">> enabling trackChanges feature: ", patch
- db.users.update {}, patch, {multi: true}, (err) ->
- console.log "finished enabling trackChanges feature"
- return done(err)
-
-
-exports.rollback = (client, done) ->
- done()
diff --git a/migrations/9_create_user_emails_array.js b/migrations/9_create_user_emails_array.js
deleted file mode 100644
index b9216c2a4c..0000000000
--- a/migrations/9_create_user_emails_array.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const Settings = require('settings-sharelatex')
-const mongojs = require('mongojs')
-const db = mongojs(Settings.mongo.url, ['users'])
-const async = require('async')
-
-const handleExit = () => console.log('Got signal. Shutting down.')
-process.on('SIGINT', handleExit)
-process.on('SIGHUP', handleExit)
-
-const initUserEmailsAttribute = (user, callback) => {
- const update = {
- $set: {
- emails: [
- {
- email: user.email,
- createdAt: new Date()
- }
- ]
- }
- }
- db.users.update({ _id: user._id }, update, callback)
-}
-
-const updateAllUsersEmailsAttribute = (users, callback) => {
- console.log(`updating ${users.length} users`)
- async.eachSeries(users, initUserEmailsAttribute, callback)
-}
-
-exports.migrate = (client, done) =>
- db.users.find(
- { emails: { $exists: false } },
- { email: 1 },
- (error, users) => {
- if (error) {
- done(error)
- } else {
- updateAllUsersEmailsAttribute(users, done)
- }
- }
- )
-
-exports.rollback = (client, done) => {
- const update = {
- $unset: {
- emails: 1
- }
- }
- db.users.update({ emails: { $exists: true } }, update, done)
-}
diff --git a/migrations/about_migrations.md b/migrations/about_migrations.md
deleted file mode 100644
index 97f91442b1..0000000000
--- a/migrations/about_migrations.md
+++ /dev/null
@@ -1,9 +0,0 @@
-If a migration is stopped midway, it will start from the beginning the next time it runs.
-
-To see which migrations have run, use db.getCollection('_migrations').find(); db._migrations.find() does not work because the collection name starts with an underscore.
-
-When testing, to roll back a migration run:
-
-```
-./node_modules/east/bin/east rollback 5 --adapter east-mongo --url mongodb://localhost:27017/sharelatex
-```
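Applying migrations forward uses the matching `migrate` command; this mirrors the rollback invocation above and assumes the same adapter and URL (check `east --help` for the exact syntax):

```
./node_modules/east/bin/east migrate 5 --adapter east-mongo --url mongodb://localhost:27017/sharelatex
```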
diff --git a/nginx/nginx.conf b/nginx/nginx.conf.template
similarity index 80%
rename from nginx/nginx.conf
rename to nginx/nginx.conf.template
index c4311103b0..e3cf283e85 100644
--- a/nginx/nginx.conf
+++ b/nginx/nginx.conf.template
@@ -1,10 +1,14 @@
+## ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ##
+## ! This file was generated from a template ! ##
+## ! See /etc/nginx/templates/ ! ##
+## ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ##
daemon off;
user www-data;
-worker_processes 4;
+worker_processes ${NGINX_WORKER_PROCESSES};
pid /run/nginx.pid;
events {
- worker_connections 768;
+ worker_connections ${NGINX_WORKER_CONNECTIONS};
# multi_accept on;
}
@@ -63,7 +67,7 @@ http {
##
# Uncomment it if you installed nginx-passenger
##
-
+
#passenger_root /usr;
#passenger_ruby /usr/bin/ruby;
diff --git a/nginx/sharelatex.conf b/nginx/sharelatex.conf
index 3a10a999f0..723fb7fbe7 100644
--- a/nginx/sharelatex.conf
+++ b/nginx/sharelatex.conf
@@ -9,10 +9,11 @@ server {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
+ proxy_set_header Host $host;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
- proxy_read_timeout 3m;
- proxy_send_timeout 3m;
+ proxy_read_timeout 10m;
+ proxy_send_timeout 10m;
}
location /socket.io {
@@ -20,11 +21,12 @@ server {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
+ proxy_set_header Host $host;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
- proxy_read_timeout 3m;
- proxy_send_timeout 3m;
+ proxy_read_timeout 10m;
+ proxy_send_timeout 10m;
}
location /stylesheets {
diff --git a/npm-shrinkwrap.json b/npm-shrinkwrap.json
deleted file mode 100644
index f5fd1370e1..0000000000
--- a/npm-shrinkwrap.json
+++ /dev/null
@@ -1,610 +0,0 @@
-{
- "name": "sharelatex",
- "version": "0.0.1",
- "dependencies": {
- "async": {
- "version": "0.9.2",
- "from": "async@>=0.9.0 <0.10.0",
- "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz"
- },
- "bson": {
- "version": "1.0.4",
- "from": "bson@>=1.0.4 <2.0.0",
- "resolved": "https://registry.npmjs.org/bson/-/bson-1.0.4.tgz"
- },
- "coffee-script": {
- "version": "1.12.7",
- "from": "coffee-script@>=1.11.1 <2.0.0",
- "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.7.tgz"
- },
- "east": {
- "version": "0.5.7",
- "from": "east@0.5.7",
- "resolved": "http://registry.npmjs.org/east/-/east-0.5.7.tgz",
- "dependencies": {
- "commander": {
- "version": "2.9.0",
- "from": "commander@2.9.0",
- "resolved": "http://registry.npmjs.org/commander/-/commander-2.9.0.tgz",
- "dependencies": {
- "graceful-readlink": {
- "version": "1.0.1",
- "from": "graceful-readlink@>=1.0.0",
- "resolved": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz"
- }
- }
- },
- "expressionify": {
- "version": "0.9.3",
- "from": "expressionify@0.9.3",
- "resolved": "http://registry.npmjs.org/expressionify/-/expressionify-0.9.3.tgz"
- },
- "progress": {
- "version": "1.1.8",
- "from": "progress@1.1.8",
- "resolved": "http://registry.npmjs.org/progress/-/progress-1.1.8.tgz"
- },
- "twostep": {
- "version": "0.4.2",
- "from": "twostep@0.4.2",
- "resolved": "http://registry.npmjs.org/twostep/-/twostep-0.4.2.tgz"
- }
- }
- },
- "east-mongo": {
- "version": "0.3.3",
- "from": "east-mongo@0.3.3",
- "resolved": "http://registry.npmjs.org/east-mongo/-/east-mongo-0.3.3.tgz"
- },
- "grunt-shell": {
- "version": "1.3.1",
- "from": "grunt-shell@>=1.1.1 <2.0.0",
- "resolved": "http://registry.npmjs.org/grunt-shell/-/grunt-shell-1.3.1.tgz",
- "dependencies": {
- "chalk": {
- "version": "1.1.3",
- "from": "chalk@>=1.0.0 <2.0.0",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz",
- "dependencies": {
- "ansi-styles": {
- "version": "2.2.1",
- "from": "ansi-styles@>=2.2.1 <3.0.0",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz"
- },
- "escape-string-regexp": {
- "version": "1.0.5",
- "from": "escape-string-regexp@>=1.0.2 <2.0.0",
- "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz"
- },
- "has-ansi": {
- "version": "2.0.0",
- "from": "has-ansi@>=2.0.0 <3.0.0",
- "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz",
- "dependencies": {
- "ansi-regex": {
- "version": "2.1.1",
- "from": "ansi-regex@>=2.0.0 <3.0.0",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz"
- }
- }
- },
- "strip-ansi": {
- "version": "3.0.1",
- "from": "strip-ansi@>=3.0.0 <4.0.0",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz",
- "dependencies": {
- "ansi-regex": {
- "version": "2.1.1",
- "from": "ansi-regex@>=2.0.0 <3.0.0",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz"
- }
- }
- },
- "supports-color": {
- "version": "2.0.0",
- "from": "supports-color@>=2.0.0 <3.0.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz"
- }
- }
- },
- "npm-run-path": {
- "version": "1.0.0",
- "from": "npm-run-path@>=1.0.0 <2.0.0",
- "resolved": "http://registry.npmjs.org/npm-run-path/-/npm-run-path-1.0.0.tgz",
- "dependencies": {
- "path-key": {
- "version": "1.0.0",
- "from": "path-key@>=1.0.0 <2.0.0",
- "resolved": "http://registry.npmjs.org/path-key/-/path-key-1.0.0.tgz"
- }
- }
- },
- "object-assign": {
- "version": "4.1.1",
- "from": "object-assign@>=4.0.0 <5.0.0",
- "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz"
- }
- }
- },
- "load-grunt-config": {
- "version": "0.19.2",
- "from": "load-grunt-config@>=0.19.2 <0.20.0",
- "resolved": "http://registry.npmjs.org/load-grunt-config/-/load-grunt-config-0.19.2.tgz",
- "dependencies": {
- "cson": {
- "version": "3.0.2",
- "from": "cson@>=3.0.2 <3.1.0",
- "resolved": "http://registry.npmjs.org/cson/-/cson-3.0.2.tgz",
- "dependencies": {
- "cson-parser": {
- "version": "1.3.5",
- "from": "cson-parser@>=1.0.6 <2.0.0",
- "resolved": "http://registry.npmjs.org/cson-parser/-/cson-parser-1.3.5.tgz"
- },
- "extract-opts": {
- "version": "3.3.1",
- "from": "extract-opts@>=3.0.1 <4.0.0",
- "resolved": "http://registry.npmjs.org/extract-opts/-/extract-opts-3.3.1.tgz",
- "dependencies": {
- "eachr": {
- "version": "3.2.0",
- "from": "eachr@>=3.2.0 <4.0.0",
- "resolved": "http://registry.npmjs.org/eachr/-/eachr-3.2.0.tgz"
- },
- "editions": {
- "version": "1.3.4",
- "from": "editions@>=1.1.1 <2.0.0",
- "resolved": "http://registry.npmjs.org/editions/-/editions-1.3.4.tgz"
- },
- "typechecker": {
- "version": "4.4.1",
- "from": "typechecker@>=4.3.0 <5.0.0",
- "resolved": "http://registry.npmjs.org/typechecker/-/typechecker-4.4.1.tgz"
- }
- }
- },
- "requirefresh": {
- "version": "2.1.0",
- "from": "requirefresh@>=2.0.0 <3.0.0",
- "resolved": "http://registry.npmjs.org/requirefresh/-/requirefresh-2.1.0.tgz",
- "dependencies": {
- "editions": {
- "version": "1.3.4",
- "from": "editions@>=1.1.1 <2.0.0",
- "resolved": "http://registry.npmjs.org/editions/-/editions-1.3.4.tgz"
- }
- }
- },
- "safefs": {
- "version": "4.1.0",
- "from": "safefs@>=4.0.0 <5.0.0",
- "resolved": "http://registry.npmjs.org/safefs/-/safefs-4.1.0.tgz",
- "dependencies": {
- "editions": {
- "version": "1.3.4",
- "from": "editions@>=1.1.1 <2.0.0",
- "resolved": "http://registry.npmjs.org/editions/-/editions-1.3.4.tgz"
- },
- "graceful-fs": {
- "version": "4.1.11",
- "from": "graceful-fs@>=4.1.4 <5.0.0",
- "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz"
- }
- }
- }
- }
- },
- "glob": {
- "version": "5.0.15",
- "from": "glob@>=5.0.15 <5.1.0",
- "resolved": "http://registry.npmjs.org/glob/-/glob-5.0.15.tgz",
- "dependencies": {
- "inflight": {
- "version": "1.0.6",
- "from": "inflight@>=1.0.4 <2.0.0",
- "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
- "dependencies": {
- "wrappy": {
- "version": "1.0.2",
- "from": "wrappy@>=1.0.0 <2.0.0",
- "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz"
- }
- }
- },
- "inherits": {
- "version": "2.0.3",
- "from": "inherits@>=2.0.0 <3.0.0",
- "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz"
- },
- "minimatch": {
- "version": "3.0.4",
- "from": "minimatch@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
- "dependencies": {
- "brace-expansion": {
- "version": "1.1.8",
- "from": "brace-expansion@>=1.1.7 <2.0.0",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz",
- "dependencies": {
- "balanced-match": {
- "version": "1.0.0",
- "from": "balanced-match@>=1.0.0 <2.0.0",
- "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz"
- },
- "concat-map": {
- "version": "0.0.1",
- "from": "concat-map@0.0.1",
- "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz"
- }
- }
- }
- }
- },
- "once": {
- "version": "1.4.0",
- "from": "once@>=1.3.0 <2.0.0",
- "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
- "dependencies": {
- "wrappy": {
- "version": "1.0.2",
- "from": "wrappy@>=1.0.0 <2.0.0",
- "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz"
- }
- }
- },
- "path-is-absolute": {
- "version": "1.0.1",
- "from": "path-is-absolute@>=1.0.0 <2.0.0",
- "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz"
- }
- }
- },
- "jit-grunt": {
- "version": "0.10.0",
- "from": "jit-grunt@>=0.10.0 <0.11.0",
- "resolved": "http://registry.npmjs.org/jit-grunt/-/jit-grunt-0.10.0.tgz"
- },
- "js-yaml": {
- "version": "3.4.6",
- "from": "js-yaml@>=3.4.3 <3.5.0",
- "resolved": "http://registry.npmjs.org/js-yaml/-/js-yaml-3.4.6.tgz",
- "dependencies": {
- "argparse": {
- "version": "1.0.9",
- "from": "argparse@>=1.0.2 <2.0.0",
- "resolved": "http://registry.npmjs.org/argparse/-/argparse-1.0.9.tgz",
- "dependencies": {
- "sprintf-js": {
- "version": "1.0.3",
- "from": "sprintf-js@>=1.0.2 <1.1.0",
- "resolved": "http://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz"
- }
- }
- },
- "esprima": {
- "version": "2.7.3",
- "from": "esprima@>=2.6.0 <3.0.0",
- "resolved": "http://registry.npmjs.org/esprima/-/esprima-2.7.3.tgz"
- },
- "inherit": {
- "version": "2.2.6",
- "from": "inherit@>=2.2.2 <3.0.0",
- "resolved": "http://registry.npmjs.org/inherit/-/inherit-2.2.6.tgz"
- }
- }
- },
- "load-grunt-tasks": {
- "version": "3.3.0",
- "from": "load-grunt-tasks@>=3.3.0 <3.4.0",
- "resolved": "http://registry.npmjs.org/load-grunt-tasks/-/load-grunt-tasks-3.3.0.tgz",
- "dependencies": {
- "arrify": {
- "version": "1.0.1",
- "from": "arrify@>=1.0.0 <2.0.0",
- "resolved": "http://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz"
- },
- "multimatch": {
- "version": "2.1.0",
- "from": "multimatch@>=2.0.0 <3.0.0",
- "resolved": "http://registry.npmjs.org/multimatch/-/multimatch-2.1.0.tgz",
- "dependencies": {
- "array-differ": {
- "version": "1.0.0",
- "from": "array-differ@>=1.0.0 <2.0.0",
- "resolved": "http://registry.npmjs.org/array-differ/-/array-differ-1.0.0.tgz"
- },
- "array-union": {
- "version": "1.0.2",
- "from": "array-union@>=1.0.1 <2.0.0",
- "resolved": "http://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz",
- "dependencies": {
- "array-uniq": {
- "version": "1.0.3",
- "from": "array-uniq@>=1.0.1 <2.0.0",
- "resolved": "https://registry.npmjs.org/array-uniq/-/array-uniq-1.0.3.tgz"
- }
- }
- },
- "minimatch": {
- "version": "3.0.4",
- "from": "minimatch@>=3.0.0 <4.0.0",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
- "dependencies": {
- "brace-expansion": {
- "version": "1.1.8",
- "from": "brace-expansion@>=1.1.7 <2.0.0",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz",
- "dependencies": {
- "balanced-match": {
- "version": "1.0.0",
- "from": "balanced-match@>=1.0.0 <2.0.0",
- "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz"
- },
- "concat-map": {
- "version": "0.0.1",
- "from": "concat-map@0.0.1",
- "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz"
- }
- }
- }
- }
- }
- }
- },
- "pkg-up": {
- "version": "1.0.0",
- "from": "pkg-up@>=1.0.0 <2.0.0",
- "resolved": "http://registry.npmjs.org/pkg-up/-/pkg-up-1.0.0.tgz",
- "dependencies": {
- "find-up": {
- "version": "1.1.2",
- "from": "find-up@>=1.0.0 <2.0.0",
- "resolved": "http://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz",
- "dependencies": {
- "path-exists": {
- "version": "2.1.0",
- "from": "path-exists@>=2.0.0 <3.0.0",
- "resolved": "http://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz"
- },
- "pinkie-promise": {
- "version": "2.0.1",
- "from": "pinkie-promise@>=2.0.0 <3.0.0",
- "resolved": "http://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz",
- "dependencies": {
- "pinkie": {
- "version": "2.0.4",
- "from": "pinkie@>=2.0.0 <3.0.0",
- "resolved": "http://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz"
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- },
- "lodash": {
- "version": "3.10.1",
- "from": "lodash@>=3.0.0 <4.0.0",
- "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz"
- },
- "mongodb": {
- "version": "2.2.34",
- "from": "mongodb@>=2.2.34 <3.0.0",
- "resolved": "http://registry.npmjs.org/mongodb/-/mongodb-2.2.34.tgz",
- "dependencies": {
- "es6-promise": {
- "version": "3.2.1",
- "from": "es6-promise@3.2.1",
- "resolved": "http://registry.npmjs.org/es6-promise/-/es6-promise-3.2.1.tgz"
- },
- "mongodb-core": {
- "version": "2.1.18",
- "from": "mongodb-core@2.1.18",
- "resolved": "http://registry.npmjs.org/mongodb-core/-/mongodb-core-2.1.18.tgz",
- "dependencies": {
- "require_optional": {
- "version": "1.0.1",
- "from": "require_optional@>=1.0.0 <1.1.0",
- "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.1.tgz",
- "dependencies": {
- "semver": {
- "version": "5.5.0",
- "from": "semver@>=5.1.0 <6.0.0",
- "resolved": "http://registry.npmjs.org/semver/-/semver-5.5.0.tgz"
- },
- "resolve-from": {
- "version": "2.0.0",
- "from": "resolve-from@>=2.0.0 <3.0.0",
- "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz"
- }
- }
- }
- }
- },
- "readable-stream": {
- "version": "2.2.7",
- "from": "readable-stream@2.2.7",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.2.7.tgz",
- "dependencies": {
- "buffer-shims": {
- "version": "1.0.0",
- "from": "buffer-shims@>=1.0.0 <1.1.0",
- "resolved": "https://registry.npmjs.org/buffer-shims/-/buffer-shims-1.0.0.tgz"
- },
- "core-util-is": {
- "version": "1.0.2",
- "from": "core-util-is@>=1.0.0 <1.1.0",
- "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz"
- },
- "isarray": {
- "version": "1.0.0",
- "from": "isarray@>=1.0.0 <1.1.0",
- "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz"
- },
- "inherits": {
- "version": "2.0.3",
- "from": "inherits@>=2.0.1 <2.1.0",
- "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz"
- },
- "process-nextick-args": {
- "version": "1.0.7",
- "from": "process-nextick-args@>=1.0.6 <1.1.0",
- "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz"
- },
- "string_decoder": {
- "version": "1.0.3",
- "from": "string_decoder@>=1.0.0 <1.1.0",
- "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.0.3.tgz",
- "dependencies": {
- "safe-buffer": {
- "version": "5.1.1",
- "from": "safe-buffer@>=5.1.0 <5.2.0",
- "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz"
- }
- }
- },
- "util-deprecate": {
- "version": "1.0.2",
- "from": "util-deprecate@>=1.0.1 <1.1.0",
- "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz"
- }
- }
- }
- }
- },
- "mongojs": {
- "version": "2.4.0",
- "from": "mongojs@2.4.0",
- "resolved": "http://registry.npmjs.org/mongojs/-/mongojs-2.4.0.tgz",
- "dependencies": {
- "each-series": {
- "version": "1.0.0",
- "from": "each-series@>=1.0.0 <2.0.0",
- "resolved": "https://registry.npmjs.org/each-series/-/each-series-1.0.0.tgz"
- },
- "once": {
- "version": "1.4.0",
- "from": "once@>=1.3.2 <2.0.0",
- "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
- "dependencies": {
- "wrappy": {
- "version": "1.0.2",
- "from": "wrappy@>=1.0.0 <2.0.0",
- "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz"
- }
- }
- },
- "parse-mongo-url": {
- "version": "1.1.1",
- "from": "parse-mongo-url@>=1.1.0 <2.0.0",
- "resolved": "https://registry.npmjs.org/parse-mongo-url/-/parse-mongo-url-1.1.1.tgz"
- },
- "readable-stream": {
- "version": "2.3.3",
- "from": "readable-stream@>=2.0.2 <3.0.0",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.3.tgz",
- "dependencies": {
- "core-util-is": {
- "version": "1.0.2",
- "from": "core-util-is@>=1.0.0 <1.1.0",
- "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz"
- },
- "inherits": {
- "version": "2.0.3",
- "from": "inherits@>=2.0.3 <2.1.0",
- "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz"
- },
- "isarray": {
- "version": "1.0.0",
- "from": "isarray@>=1.0.0 <1.1.0",
- "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz"
- },
- "process-nextick-args": {
- "version": "1.0.7",
- "from": "process-nextick-args@>=1.0.6 <1.1.0",
- "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz"
- },
- "safe-buffer": {
- "version": "5.1.1",
- "from": "safe-buffer@>=5.1.1 <5.2.0",
- "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz"
- },
- "string_decoder": {
- "version": "1.0.3",
- "from": "string_decoder@>=1.0.3 <1.1.0",
- "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.0.3.tgz"
- },
- "util-deprecate": {
- "version": "1.0.2",
- "from": "util-deprecate@>=1.0.1 <1.1.0",
- "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz"
- }
- }
- },
- "thunky": {
- "version": "0.1.0",
- "from": "thunky@>=0.1.0 <0.2.0",
- "resolved": "https://registry.npmjs.org/thunky/-/thunky-0.1.0.tgz"
- },
- "to-mongodb-core": {
- "version": "2.0.0",
- "from": "to-mongodb-core@>=2.0.0 <3.0.0",
- "resolved": "https://registry.npmjs.org/to-mongodb-core/-/to-mongodb-core-2.0.0.tgz"
- },
- "xtend": {
- "version": "4.0.1",
- "from": "xtend@>=4.0.0 <5.0.0",
- "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz"
- }
- }
- },
- "redis": {
- "version": "2.8.0",
- "from": "redis@>=2.6.2 <3.0.0",
- "resolved": "https://registry.npmjs.org/redis/-/redis-2.8.0.tgz",
- "dependencies": {
- "double-ended-queue": {
- "version": "2.1.0-0",
- "from": "double-ended-queue@>=2.1.0-0 <3.0.0",
- "resolved": "https://registry.npmjs.org/double-ended-queue/-/double-ended-queue-2.1.0-0.tgz"
- },
- "redis-commands": {
- "version": "1.3.1",
- "from": "redis-commands@>=1.2.0 <2.0.0",
- "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.3.1.tgz"
- },
- "redis-parser": {
- "version": "2.6.0",
- "from": "redis-parser@>=2.6.0 <3.0.0",
- "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-2.6.0.tgz"
- }
- }
- },
- "rimraf": {
- "version": "2.2.8",
- "from": "rimraf@>=2.2.6 <2.3.0",
- "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz"
- },
- "settings-sharelatex": {
- "version": "1.0.0",
- "from": "git+https://github.com/sharelatex/settings-sharelatex.git",
- "resolved": "git+https://github.com/sharelatex/settings-sharelatex.git#b4fb8404c5de571d029bf4c29e96a60b21206f94",
- "dependencies": {
- "coffee-script": {
- "version": "1.6.0",
- "from": "coffee-script@1.6.0",
- "resolved": "http://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz"
- }
- }
- },
- "underscore": {
- "version": "1.8.3",
- "from": "underscore@>=1.7.0 <2.0.0",
- "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz"
- }
- }
-}
diff --git a/package.json b/package.json
deleted file mode 100644
index 4b32165fcd..0000000000
--- a/package.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "name": "sharelatex",
- "version": "0.0.1",
- "description": "An online collaborative LaTeX editor",
- "dependencies": {
- "async": "^0.9.0",
- "bson": "^1.0.4",
- "coffee-script": "^1.11.1",
- "east": "0.5.7",
- "east-mongo": "0.3.3",
- "grunt-shell": "^1.1.1",
- "load-grunt-config": "^0.19.2",
- "lodash": "^3.0.0",
- "mongodb": "^2.2.34",
- "mongojs": "2.4.0",
- "redis": "^2.6.2",
- "rimraf": "~2.2.6",
- "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git",
- "underscore": "^1.7.0"
- },
- "devDependencies": {
- "grunt": "~0.4.2",
- "bunyan": "~0.22.1",
- "grunt-bunyan": "~0.5.0",
- "grunt-execute": "~0.1.5",
- "grunt-available-tasks": "~0.4.1",
- "grunt-concurrent": "~0.4.3",
- "grunt-contrib-coffee": "~0.10.1",
- "semver": "~2.2.1",
- "knox": "~0.8.9"
- }
-}
diff --git a/runit/chat-sharelatex/run b/runit/chat-sharelatex/run
index cc5f75057f..c000f7d80d 100755
--- a/runit/chat-sharelatex/run
+++ b/runit/chat-sharelatex/run
@@ -1,5 +1,4 @@
#!/bin/bash
-export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
diff --git a/runit/clsi-sharelatex/run b/runit/clsi-sharelatex/run
index c1469b6cfe..e8e7bbaf4c 100755
--- a/runit/clsi-sharelatex/run
+++ b/runit/clsi-sharelatex/run
@@ -1,5 +1,4 @@
#!/bin/bash
-export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
diff --git a/runit/contacts-sharelatex/run b/runit/contacts-sharelatex/run
index e220d9ac1d..8de491ac6a 100755
--- a/runit/contacts-sharelatex/run
+++ b/runit/contacts-sharelatex/run
@@ -1,5 +1,4 @@
#!/bin/bash
-export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
@@ -7,4 +6,4 @@ if [ "$DEBUG_NODE" == "true" ]; then
NODE_PARAMS="--inspect=0.0.0.0:30360"
fi
-exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/contacts/app.js >> /var/log/sharelatex/contacts 2>&1
+exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/contacts/app.js >> /var/log/sharelatex/contacts.log 2>&1
diff --git a/runit/docstore-sharelatex/run b/runit/docstore-sharelatex/run
index 2a171f0968..f6b3285358 100755
--- a/runit/docstore-sharelatex/run
+++ b/runit/docstore-sharelatex/run
@@ -1,5 +1,4 @@
#!/bin/bash
-export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
diff --git a/runit/document-updater-sharelatex/run b/runit/document-updater-sharelatex/run
index 51472b3d48..7d688a17de 100755
--- a/runit/document-updater-sharelatex/run
+++ b/runit/document-updater-sharelatex/run
@@ -1,5 +1,4 @@
#!/bin/bash
-export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
diff --git a/runit/filestore-sharelatex/run b/runit/filestore-sharelatex/run
index 4237a38793..8baccbfe0b 100755
--- a/runit/filestore-sharelatex/run
+++ b/runit/filestore-sharelatex/run
@@ -1,3 +1,2 @@
#!/bin/bash
-export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
exec /sbin/setuser www-data /usr/bin/node /var/www/sharelatex/filestore/app.js >> /var/log/sharelatex/filestore.log 2>&1
diff --git a/runit/notifications-sharelatex/run b/runit/notifications-sharelatex/run
index 89f8ad54f9..721b1cf1e9 100755
--- a/runit/notifications-sharelatex/run
+++ b/runit/notifications-sharelatex/run
@@ -1,5 +1,4 @@
#!/bin/bash
-export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
diff --git a/runit/real-time-sharelatex/run b/runit/real-time-sharelatex/run
index ad005b624c..392c4525ef 100755
--- a/runit/real-time-sharelatex/run
+++ b/runit/real-time-sharelatex/run
@@ -1,3 +1,2 @@
#!/bin/bash
-export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
exec /sbin/setuser www-data /usr/bin/node /var/www/sharelatex/real-time/app.js >> /var/log/sharelatex/real-time.log 2>&1
diff --git a/runit/spelling-sharelatex/run b/runit/spelling-sharelatex/run
index a9a73f8ae0..af7941a4b4 100755
--- a/runit/spelling-sharelatex/run
+++ b/runit/spelling-sharelatex/run
@@ -1,5 +1,4 @@
#!/bin/bash
-export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
diff --git a/runit/track-changes-sharelatex/run b/runit/track-changes-sharelatex/run
index 45b3b77ebc..a137098588 100755
--- a/runit/track-changes-sharelatex/run
+++ b/runit/track-changes-sharelatex/run
@@ -1,5 +1,4 @@
#!/bin/bash
-export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
diff --git a/runit/web-sharelatex/run b/runit/web-sharelatex/run
index c1371de0f4..15fba8f806 100755
--- a/runit/web-sharelatex/run
+++ b/runit/web-sharelatex/run
@@ -1,5 +1,4 @@
#!/bin/bash
-export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
diff --git a/services.js b/services.js
index 84fd7622f5..c47186a159 100644
--- a/services.js
+++ b/services.js
@@ -1,47 +1,63 @@
-module.exports =
+module.exports = [
+ {
+ name: 'web',
+ repo: 'https://github.com/sharelatex/web-sharelatex.git',
+ version: 'master',
+ },
+ {
+ name: 'real-time',
+ repo: 'https://github.com/sharelatex/real-time-sharelatex.git',
+ version: 'master',
+ },
+ {
+ name: 'document-updater',
+ repo: 'https://github.com/sharelatex/document-updater-sharelatex.git',
+ version: 'master',
+ },
+ {
+ name: 'clsi',
+ repo: 'https://github.com/sharelatex/clsi-sharelatex.git',
+ version: 'master',
+ },
+ {
+ name: 'filestore',
+ repo: 'https://github.com/sharelatex/filestore-sharelatex.git',
+ version: 'master',
+ },
+ {
+ name: 'track-changes',
+ repo: 'https://github.com/sharelatex/track-changes-sharelatex.git',
+ version: 'master',
+ },
+ {
+ name: 'docstore',
+ repo: 'https://github.com/sharelatex/docstore-sharelatex.git',
+ version: 'master',
+ },
+ {
+ name: 'chat',
+ repo: 'https://github.com/sharelatex/chat-sharelatex.git',
+ version: 'master',
+ },
+ {
+ name: 'spelling',
+ repo: 'https://github.com/sharelatex/spelling-sharelatex.git',
+ version: 'master',
+ },
+ {
+ name: 'contacts',
+ repo: 'https://github.com/sharelatex/contacts-sharelatex.git',
+ version: 'master',
+ },
+ {
+ name: 'notifications',
+ repo: 'https://github.com/sharelatex/notifications-sharelatex.git',
+ version: 'master',
+ },
+]
-[{
- name: "web",
- repo: "https://github.com/sharelatex/web-sharelatex.git",
- version: "master"
-}, {
- name: "real-time",
- repo: "https://github.com/sharelatex/real-time-sharelatex.git",
- version: "master"
-}, {
- name: "document-updater",
- repo: "https://github.com/sharelatex/document-updater-sharelatex.git",
- version: "master"
-}, {
- name: "clsi",
- repo: "https://github.com/sharelatex/clsi-sharelatex.git",
- version: "master"
-}, {
- name: "filestore",
- repo: "https://github.com/sharelatex/filestore-sharelatex.git",
- version: "master"
-}, {
- name: "track-changes",
- repo: "https://github.com/sharelatex/track-changes-sharelatex.git",
- version: "master"
-}, {
- name: "docstore",
- repo: "https://github.com/sharelatex/docstore-sharelatex.git",
- version: "master"
-}, {
- name: "chat",
- repo: "https://github.com/sharelatex/chat-sharelatex.git",
- version: "master"
-}, {
- name: "spelling",
- repo: "https://github.com/sharelatex/spelling-sharelatex.git",
- version: "master"
-}, {
- name: "contacts",
- repo: "https://github.com/sharelatex/contacts-sharelatex.git",
- version: "master"
-}, {
- name: "notifications",
- repo: "https://github.com/sharelatex/notifications-sharelatex.git",
- version: "master"
-}]
+if (require.main === module) {
+ for (const service of module.exports) {
+ console.log(service.name)
+ }
+}
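The `require.main` guard above lets services.js double as a script that prints the service names. A hypothetical consumer of the exported array (the clone command is illustrative only, not the actual build logic):

```js
// Hypothetical consumer of services.js: derive a checkout command per service.
const services = require('./services')

for (const { name, repo, version } of services) {
  console.log(`git clone --branch ${version} ${repo} ${name}`)
}
```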
diff --git a/settings.coffee b/settings.coffee
deleted file mode 100644
index ef2fdd4115..0000000000
--- a/settings.coffee
+++ /dev/null
@@ -1,565 +0,0 @@
-Path = require('path')
-
-# These credentials are used for authenticating api requests
-# between services that may need to go over public channels
-httpAuthUser = "sharelatex"
-httpAuthPass = process.env["WEB_API_PASSWORD"]
-httpAuthUsers = {}
-httpAuthUsers[httpAuthUser] = httpAuthPass
-
-parse = (option)->
- if option?
- try
- opt = JSON.parse(option)
- return opt
- catch err
- console.error "problem parsing #{option}, invalid JSON"
- return undefined
-
-
-DATA_DIR = '/var/lib/sharelatex/data'
-TMP_DIR = '/var/lib/sharelatex/tmp'
-
-settings =
-
- clsi:
- optimiseInDocker: process.env['OPTIMISE_PDF'] == 'true'
-
- brandPrefix: ""
-
- allowAnonymousReadAndWriteSharing:
- process.env['SHARELATEX_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING'] == 'true'
-
- # Databases
- # ---------
-
- # ShareLaTeX's main persistent data store is MongoDB (http://www.mongodb.org/)
- # Documentation about the URL connection string format can be found at:
- #
- # http://docs.mongodb.org/manual/reference/connection-string/
- #
- # The following works out of the box with Mongo's default settings:
- mongo:
- url : process.env["SHARELATEX_MONGO_URL"] or 'mongodb://dockerhost/sharelatex'
-
- # Redis is used in ShareLaTeX for high volume queries, like real-time
- # editing, and session management.
- #
- # The following config will work with Redis's default settings:
- redis:
- web: redisConfig =
- host: process.env["SHARELATEX_REDIS_HOST"] or "dockerhost"
- port: process.env["SHARELATEX_REDIS_PORT"] or "6379"
- password: process.env["SHARELATEX_REDIS_PASS"] or ""
- key_schema:
- # document-updater
- blockingKey: ({doc_id}) -> "Blocking:#{doc_id}"
- docLines: ({doc_id}) -> "doclines:#{doc_id}"
- docOps: ({doc_id}) -> "DocOps:#{doc_id}"
- docVersion: ({doc_id}) -> "DocVersion:#{doc_id}"
- docHash: ({doc_id}) -> "DocHash:#{doc_id}"
- projectKey: ({doc_id}) -> "ProjectId:#{doc_id}"
- docsInProject: ({project_id}) -> "DocsIn:#{project_id}"
- ranges: ({doc_id}) -> "Ranges:#{doc_id}"
- # document-updater:realtime
- pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}"
- # document-updater:history
- uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
- docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
- # document-updater:lock
- blockingKey: ({doc_id}) -> "Blocking:#{doc_id}"
- # track-changes:lock
- historyLock: ({doc_id}) -> "HistoryLock:#{doc_id}"
- historyIndexLock: ({project_id}) -> "HistoryIndexLock:#{project_id}"
- # track-changes:history
- uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
- docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
- # realtime
- clientsInProject: ({project_id}) -> "clients_in_project:#{project_id}"
- connectedUser: ({project_id, client_id})-> "connected_user:#{project_id}:#{client_id}"
- fairy: redisConfig
- # track-changes and document-updater
- realtime: redisConfig
- documentupdater: redisConfig
- lock: redisConfig
- history: redisConfig
- websessions: redisConfig
- api: redisConfig
- pubsub: redisConfig
- project_history: redisConfig
-
- # The compile server (the clsi) uses a SQL database to cache files and
- # metadata. SQLite is the default, and the load is low enough that this will
- # be fine in production (we use SQLite at sharelatex.com).
- #
- # If you want to configure a different database, see the Sequelize documentation
- # for available options:
- #
- # https://github.com/sequelize/sequelize/wiki/API-Reference-Sequelize#example-usage
- #
- mysql:
- clsi:
- database: "clsi"
- username: "clsi"
- password: ""
- dialect: "sqlite"
- storage: Path.join(DATA_DIR, "db.sqlite")
-
- # File storage
- # ------------
-
- # ShareLaTeX can store binary files like images either locally or in Amazon
- # S3. The default is locally:
- filestore:
- backend: "fs"
- stores:
- user_files: Path.join(DATA_DIR, "user_files")
- template_files: Path.join(DATA_DIR, "template_files")
-
- # To use Amazon S3 as a storage backend, comment out the above config, and
- # uncomment the following, filling in your key, secret, and bucket name:
- #
- # filestore:
- # backend: "s3"
- # stores:
- # user_files: "BUCKET_NAME"
- # s3:
- # key: "AWS_KEY"
- # secret: "AWS_SECRET"
- #
-
- trackchanges:
- continueOnError: true
-
- # Local disk caching
- # ------------------
- path:
- # If we ever need to write something to disk (e.g. incoming requests
- # that need processing but may be too big for memory), then write
- # them to disk here:
- dumpFolder: Path.join(TMP_DIR, "dumpFolder")
- # Where to write uploads before they are processed
- uploadFolder: Path.join(TMP_DIR, "uploads")
- # Where to write the project to disk before running LaTeX on it
- compilesDir: Path.join(DATA_DIR, "compiles")
- # Where to cache downloaded URLs for the CLSI
- clsiCacheDir: Path.join(DATA_DIR, "cache")
-
- # Server Config
- # -------------
-
- # Where your instance of ShareLaTeX can be found publicly. This is used
- # when emails are sent out and in generated links:
- siteUrl: siteUrl = process.env["SHARELATEX_SITE_URL"] or 'http://localhost'
-
- # The name used to describe your ShareLaTeX installation
- appName: process.env["SHARELATEX_APP_NAME"] or "ShareLaTeX (Community Edition)"
-
- restrictInvitesToExistingAccounts: process.env["SHARELATEX_RESTRICT_INVITES_TO_EXISTING_ACCOUNTS"] == 'true'
-
- nav:
- title: process.env["SHARELATEX_NAV_TITLE"] or process.env["SHARELATEX_APP_NAME"] or "ShareLaTeX Community Edition"
-
-
- # The email address which users will be directed to as the main point of
- # contact for this installation of ShareLaTeX.
- adminEmail: process.env["SHARELATEX_ADMIN_EMAIL"] or "placeholder@example.com"
-
- # If provided, a sessionSecret is used to sign cookies so that they cannot be
- # spoofed. This is recommended.
- security:
- sessionSecret: process.env["SHARELATEX_SESSION_SECRET"] or process.env["CRYPTO_RANDOM"]
-
- # These credentials are used for authenticating api requests
- # between services that may need to go over public channels
- httpAuthUsers: httpAuthUsers
-
- # Should javascript assets be served minified or not.
- useMinifiedJs: true
-
- # Should static assets be sent with a header to tell the browser to cache
- # them. This should be false in development where changes are being made,
- # but should be set to true in production.
- cacheStaticAssets: true
-
- # If you are running ShareLaTeX over https, set this to true to send the
- # cookie with a secure flag (recommended).
- secureCookie: process.env["SHARELATEX_SECURE_COOKIE"]?
-
- # If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc)
- # then set this to true to allow it to correctly detect the forwarded IP
- # address and http/https protocol information.
-
- behindProxy: process.env["SHARELATEX_BEHIND_PROXY"] or false
-
- i18n:
- subdomainLang:
- www: {lngCode:process.env["SHARELATEX_SITE_LANGUAGE"] or "en", url: siteUrl}
- defaultLng: process.env["SHARELATEX_SITE_LANGUAGE"] or "en"
-
- currentImageName: process.env["TEX_LIVE_DOCKER_IMAGE"]
-
- apis:
- web:
- url: "http://localhost:3000"
- user: httpAuthUser
- pass: httpAuthPass
- project_history:
- enabled: false
- references:{}
- notifications:undefined
-
- defaultFeatures:
- collaborators: -1
- dropbox: true
- versioning: true
- compileTimeout: 180
- compileGroup: "standard"
- trackChanges: true
- templates: true
- references: true
-
-## OPTIONAL CONFIGURABLE SETTINGS
-
-if process.env["SHARELATEX_LEFT_FOOTER"]?
- try
- settings.nav.left_footer = JSON.parse(process.env["SHARELATEX_LEFT_FOOTER"])
- catch e
- console.error("could not parse SHARELATEX_LEFT_FOOTER, not valid JSON")
-
-if process.env["SHARELATEX_RIGHT_FOOTER"]?
- settings.nav.right_footer = process.env["SHARELATEX_RIGHT_FOOTER"]
- try
- settings.nav.right_footer = JSON.parse(process.env["SHARELATEX_RIGHT_FOOTER"])
- catch e
- console.error("could not parse SHARELATEX_RIGHT_FOOTER, not valid JSON")
-
-if process.env["SHARELATEX_HEADER_IMAGE_URL"]?
- settings.nav.custom_logo = process.env["SHARELATEX_HEADER_IMAGE_URL"]
-
-if process.env["SHARELATEX_HEADER_NAV_LINKS"]?
- console.error """
-# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
-#
-# WARNING: SHARELATEX_HEADER_NAV_LINKS is no longer supported
-# See https://github.com/sharelatex/sharelatex/wiki/Configuring-Headers,-Footers-&-Logo
-#
-# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
-"""
-
-if process.env["SHARELATEX_HEADER_EXTRAS"]?
- try
- settings.nav.header_extras = JSON.parse(process.env["SHARELATEX_HEADER_EXTRAS"])
- catch e
- console.error("could not parse SHARELATEX_HEADER_EXTRAS, not valid JSON")
-
-
-
-# Sending Email
-# -------------
-#
-# You must configure a mail server to be able to send invite emails from
-# ShareLaTeX. The config settings are passed to nodemailer. See the nodemailer
-# documentation for available options:
-#
-# http://www.nodemailer.com/docs/transports
-
-
-if process.env["SHARELATEX_EMAIL_FROM_ADDRESS"]?
-
- settings.email =
- fromAddress: process.env["SHARELATEX_EMAIL_FROM_ADDRESS"]
- replyTo: process.env["SHARELATEX_EMAIL_REPLY_TO"] or ""
- driver: process.env["SHARELATEX_EMAIL_DRIVER"]
- parameters:
- #AWS Creds
- AWSAccessKeyID: process.env["SHARELATEX_EMAIL_AWS_SES_ACCESS_KEY_ID"]
- AWSSecretKey: process.env["SHARELATEX_EMAIL_AWS_SES_SECRET_KEY"]
-
- #SMTP Creds
- host: process.env["SHARELATEX_EMAIL_SMTP_HOST"]
- port: process.env["SHARELATEX_EMAIL_SMTP_PORT"],
- secure: parse(process.env["SHARELATEX_EMAIL_SMTP_SECURE"])
- ignoreTLS: parse(process.env["SHARELATEX_EMAIL_SMTP_IGNORE_TLS"])
-
- textEncoding: process.env["SHARELATEX_EMAIL_TEXT_ENCODING"]
- templates:
- customFooter: process.env["SHARELATEX_CUSTOM_EMAIL_FOOTER"]
-
- if process.env["SHARELATEX_EMAIL_SMTP_USER"]? or process.env["SHARELATEX_EMAIL_SMTP_PASS"]?
- settings.email.parameters.auth =
- user: process.env["SHARELATEX_EMAIL_SMTP_USER"]
- pass: process.env["SHARELATEX_EMAIL_SMTP_PASS"]
-
- if process.env["SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH"]?
- settings.email.parameters.tls =
- rejectUnauthorized: parse(process.env["SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH"])
-
-
-# i18n
-if process.env["SHARELATEX_LANG_DOMAIN_MAPPING"]?
-
- settings.i18n.subdomainLang = parse(process.env["SHARELATEX_LANG_DOMAIN_MAPPING"])
-
-# Password Settings
-# -----------
-# These restrict the passwords users can use when registering
-# opts are from http://antelle.github.io/passfield
-if process.env["SHARELATEX_PASSWORD_VALIDATION_PATTERN"] or process.env["SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH"] or process.env["SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH"]
-
- settings.passwordStrengthOptions =
- pattern: process.env["SHARELATEX_PASSWORD_VALIDATION_PATTERN"] or "aA$3"
- length: {min:process.env["SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH"] or 8, max: process.env["SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH"] or 150}
-
-
-
-
-#######################
-# ShareLaTeX Server Pro
-#######################
-
-if parse(process.env["SHARELATEX_IS_SERVER_PRO"]) == true
- settings.bypassPercentageRollouts = true
- settings.apis.references =
- url: "http://localhost:3040"
-
-
-# LDAP - SERVER PRO ONLY
-# ----------
-
-if process.env["SHARELATEX_LDAP_HOST"]
- console.error """
-# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
-#
-# WARNING: The LDAP configuration format has changed in version 0.5.1
-# See https://github.com/sharelatex/sharelatex/wiki/Server-Pro:-LDAP-Config
-#
-# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
-"""
-
-if process.env["SHARELATEX_LDAP_URL"]
- settings.externalAuth = true
- settings.ldap =
- emailAtt: process.env["SHARELATEX_LDAP_EMAIL_ATT"]
- nameAtt: process.env["SHARELATEX_LDAP_NAME_ATT"]
- lastNameAtt: process.env["SHARELATEX_LDAP_LAST_NAME_ATT"]
- updateUserDetailsOnLogin: process.env["SHARELATEX_LDAP_UPDATE_USER_DETAILS_ON_LOGIN"] == 'true'
- placeholder: process.env["SHARELATEX_LDAP_PLACEHOLDER"]
- server:
- url: process.env["SHARELATEX_LDAP_URL"]
- bindDn: process.env["SHARELATEX_LDAP_BIND_DN"]
- bindCredentials: process.env["SHARELATEX_LDAP_BIND_CREDENTIALS"]
- bindProperty: process.env["SHARELATEX_LDAP_BIND_PROPERTY"]
- searchBase: process.env["SHARELATEX_LDAP_SEARCH_BASE"]
- searchScope: process.env["SHARELATEX_LDAP_SEARCH_SCOPE"]
- searchFilter: process.env["SHARELATEX_LDAP_SEARCH_FILTER"]
- searchAttributes: (
- if _ldap_search_attribs = process.env["SHARELATEX_LDAP_SEARCH_ATTRIBUTES"]
- try
- JSON.parse(_ldap_search_attribs)
- catch e
- console.error "could not parse SHARELATEX_LDAP_SEARCH_ATTRIBUTES"
- else
- undefined
- )
- groupDnProperty: process.env["SHARELATEX_LDAP_GROUP_DN_PROPERTY"]
- groupSearchBase: process.env["SHARELATEX_LDAP_GROUP_SEARCH_BASE"]
- groupSearchScope: process.env["SHARELATEX_LDAP_GROUP_SEARCH_SCOPE"]
- groupSearchFilter: process.env["SHARELATEX_LDAP_GROUP_SEARCH_FILTER"]
- groupSearchAttributes: (
- if _ldap_group_search_attribs = process.env["SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES"]
- try
- JSON.parse(_ldap_group_search_attribs)
- catch e
- console.error "could not parse SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES"
- else
- undefined
- )
- cache: process.env["SHARELATEX_LDAP_CACHE"] == 'true'
- timeout: (
- if _ldap_timeout = process.env["SHARELATEX_LDAP_TIMEOUT"]
- try
- parseInt(_ldap_timeout)
- catch e
- console.error "Cannot parse SHARELATEX_LDAP_TIMEOUT"
- else
- undefined
- )
- connectTimeout: (
- if _ldap_connect_timeout = process.env["SHARELATEX_LDAP_CONNECT_TIMEOUT"]
- try
- parseInt(_ldap_connect_timeout)
- catch e
- console.error "Cannot parse SHARELATEX_LDAP_CONNECT_TIMEOUT"
- else
- undefined
- )
-
- if process.env["SHARELATEX_LDAP_TLS_OPTS_CA_PATH"]
- try
- ca = JSON.parse(process.env["SHARELATEX_LDAP_TLS_OPTS_CA_PATH"])
- catch e
- console.error "could not parse SHARELATEX_LDAP_TLS_OPTS_CA_PATH, invalid JSON"
-
- if typeof(ca) == 'string'
- ca_paths = [ca]
- else if typeof(ca) == 'object' && ca?.length?
- ca_paths = ca
- else
- console.error "problem parsing SHARELATEX_LDAP_TLS_OPTS_CA_PATH"
-
- settings.ldap.server.tlsOptions =
- rejectUnauthorized: process.env["SHARELATEX_LDAP_TLS_OPTS_REJECT_UNAUTH"] == "true"
- ca:ca_paths # e.g.'/etc/ldap/ca_certs.pem'
-
-
-
-
-
-if process.env["SHARELATEX_SAML_ENTRYPOINT"]
- # NOTE: see https://github.com/bergie/passport-saml/blob/master/README.md for docs of `server` options
- settings.externalAuth = true
- settings.saml =
- updateUserDetailsOnLogin: process.env["SHARELATEX_SAML_UPDATE_USER_DETAILS_ON_LOGIN"] == 'true'
- identityServiceName: process.env["SHARELATEX_SAML_IDENTITY_SERVICE_NAME"]
- emailField: process.env["SHARELATEX_SAML_EMAIL_FIELD"] || process.env["SHARELATEX_SAML_EMAIL_FIELD_NAME"]
- firstNameField: process.env["SHARELATEX_SAML_FIRST_NAME_FIELD"]
- lastNameField: process.env["SHARELATEX_SAML_LAST_NAME_FIELD"]
- server:
- # strings
- entryPoint: process.env["SHARELATEX_SAML_ENTRYPOINT"]
- callbackUrl: process.env["SHARELATEX_SAML_CALLBACK_URL"]
- issuer: process.env["SHARELATEX_SAML_ISSUER"]
- decryptionPvk: process.env["SHARELATEX_SAML_DECRYPTION_PVK"]
- signatureAlgorithm: process.env["SHARELATEX_SAML_SIGNATURE_ALGORITHM"]
- identifierFormat: process.env["SHARELATEX_SAML_IDENTIFIER_FORMAT"]
- attributeConsumingServiceIndex: process.env["SHARELATEX_SAML_ATTRIBUTE_CONSUMING_SERVICE_INDEX"]
- authnContext: process.env["SHARELATEX_SAML_AUTHN_CONTEXT"]
- authnRequestBinding: process.env["SHARELATEX_SAML_AUTHN_REQUEST_BINDING"]
- validateInResponseTo: process.env["SHARELATEX_SAML_VALIDATE_IN_RESPONSE_TO"]
- cacheProvider: process.env["SHARELATEX_SAML_CACHE_PROVIDER"]
- logoutUrl: process.env["SHARELATEX_SAML_LOGOUT_URL"]
- logoutCallbackUrl: process.env["SHARELATEX_SAML_LOGOUT_CALLBACK_URL"]
- disableRequestedAuthnContext: process.env["SHARELATEX_SAML_DISABLE_REQUESTED_AUTHN_CONTEXT"] == 'true'
- forceAuthn: process.env["SHARELATEX_SAML_FORCE_AUTHN"] == 'true'
- skipRequestCompression: process.env["SHARELATEX_SAML_SKIP_REQUEST_COMPRESSION"] == 'true'
- acceptedClockSkewMs: (
- if _saml_skew = process.env["SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS"]
- try
- parseInt(_saml_skew)
- catch e
- console.error "Cannot parse SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS"
- else
- undefined
- )
- requestIdExpirationPeriodMs: (
- if _saml_expiration = process.env["SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"]
- try
- parseInt(_saml_expiration)
- catch e
- console.error "Cannot parse SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"
- else
- undefined
- )
- additionalParams: (
- if _saml_additionalParams = process.env["SHARELATEX_SAML_ADDITIONAL_PARAMS"]
- try
- JSON.parse(_saml_additionalParams)
- catch e
- console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_PARAMS"
- else
- undefined
- )
- additionalAuthorizeParams: (
- if _saml_additionalAuthorizeParams = process.env["SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS"]
- try
- JSON.parse(_saml_additionalAuthorizeParams )
- catch e
- console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS"
- else
- undefined
- )
- additionalLogoutParams: (
- if _saml_additionalLogoutParams = process.env["SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS"]
- try
- JSON.parse(_saml_additionalLogoutParams )
- catch e
- console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS"
- else
- undefined
- )
-
- # SHARELATEX_SAML_CERT cannot be empty
- # https://github.com/bergie/passport-saml/commit/f6b1c885c0717f1083c664345556b535f217c102
- if process.env["SHARELATEX_SAML_CERT"]
- settings.saml.server.cert = process.env["SHARELATEX_SAML_CERT"]
- settings.saml.server.privateCert = process.env["SHARELATEX_SAML_PRIVATE_CERT"]
-
-# Compiler
-# --------
-if process.env["SANDBOXED_COMPILES"] == "true"
- settings.clsi =
- dockerRunner: true
- docker:
- image: process.env["TEX_LIVE_DOCKER_IMAGE"]
- env:
- HOME: "/tmp"
- PATH: process.env["COMPILER_PATH"] or "/usr/local/texlive/2015/bin/x86_64-linux:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
- user: "www-data"
-
- if !settings.path?
- settings.path = {}
- settings.path.synctexBaseDir = () -> "/compile"
- if process.env['SANDBOXED_COMPILES_SIBLING_CONTAINERS'] == 'true'
- console.log("Using sibling containers for sandboxed compiles")
- if process.env['SANDBOXED_COMPILES_HOST_DIR']
- settings.path.sandboxedCompilesHostDir = process.env['SANDBOXED_COMPILES_HOST_DIR']
- else
- console.error('Sibling containers, but SANDBOXED_COMPILES_HOST_DIR not set')
-
-
-# Templates
-# ---------
-if process.env["SHARELATEX_TEMPLATES_USER_ID"]
- settings.templates =
- mountPointUrl: "/templates"
- user_id: process.env["SHARELATEX_TEMPLATES_USER_ID"]
-
- settings.templateLinks = parse(process.env["SHARELATEX_NEW_PROJECT_TEMPLATE_LINKS"])
-
-
-# /Learn
-# -------
-if process.env["SHARELATEX_PROXY_LEARN"]?
- settings.proxyLearn = parse(process.env["SHARELATEX_PROXY_LEARN"])
-
-
-# /References
-# -----------
-if process.env["SHARELATEX_ELASTICSEARCH_URL"]?
- settings.references.elasticsearch =
- host: process.env["SHARELATEX_ELASTICSEARCH_URL"]
-
-# TeX Live Images
-# -----------
-if process.env["ALL_TEX_LIVE_DOCKER_IMAGES"]?
- allTexLiveDockerImages = process.env["ALL_TEX_LIVE_DOCKER_IMAGES"].split(',')
-if process.env["ALL_TEX_LIVE_DOCKER_IMAGE_NAMES"]?
- allTexLiveDockerImageNames = process.env["ALL_TEX_LIVE_DOCKER_IMAGE_NAMES"].split(',')
-if allTexLiveDockerImages?
- settings.allowedImageNames = []
- for fullImageName, index in allTexLiveDockerImages
- imageName = Path.basename(fullImageName)
- imageDesc = if allTexLiveDockerImageNames? then allTexLiveDockerImageNames[index] else imageName
- settings.allowedImageNames.push({ imageName, imageDesc })
-
-# With lots of incoming and outgoing HTTP connections to different services,
-# sometimes long running, it is a good idea to increase the default number
-# of sockets that Node will hold open.
-http = require('http')
-http.globalAgent.maxSockets = 300
-https = require('https')
-https.globalAgent.maxSockets = 300
-
-module.exports = settings
-
diff --git a/settings.js b/settings.js
new file mode 100644
index 0000000000..eb5a5f015e
--- /dev/null
+++ b/settings.js
@@ -0,0 +1,778 @@
+/* eslint-disable
+ camelcase,
+ no-cond-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let allTexLiveDockerImageNames, allTexLiveDockerImages, redisConfig, siteUrl
+let e
+const Path = require('path')
+
+// These credentials are used for authenticating api requests
+// between services that may need to go over public channels
+const httpAuthUser = 'sharelatex'
+const httpAuthPass = process.env.WEB_API_PASSWORD
+const httpAuthUsers = {}
+httpAuthUsers[httpAuthUser] = httpAuthPass
+
+const parse = function (option) {
+ if (option != null) {
+ try {
+ const opt = JSON.parse(option)
+ return opt
+ } catch (err) {
+ throw new Error(`problem parsing ${option}, invalid JSON`)
+ }
+ }
+}
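+// For example, parse('{"a": 1}') returns { a: 1 }, parse(undefined) returns
+// undefined, and parse('not json') throws.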
+
+const parseIntOrFail = function (value) {
+ const parsedValue = parseInt(value, 10)
+ if (isNaN(parsedValue)) {
+ throw new Error(`'${value}' is an invalid integer`)
+ }
+ return parsedValue
+}
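+// For example, parseIntOrFail('180') returns 180, while parseIntOrFail('abc')
+// throws instead of silently yielding NaN.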
+
+const DATA_DIR = '/var/lib/sharelatex/data'
+const TMP_DIR = '/var/lib/sharelatex/tmp'
+
+const settings = {
+ clsi: {
+ optimiseInDocker: process.env.OPTIMISE_PDF === 'true',
+ },
+
+ brandPrefix: '',
+
+ allowAnonymousReadAndWriteSharing:
+ process.env.SHARELATEX_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING === 'true',
+
+ // Databases
+ // ---------
+
+ // ShareLaTeX's main persistent data store is MongoDB (http://www.mongodb.org/)
+ // Documentation about the URL connection string format can be found at:
+ //
+ // http://docs.mongodb.org/manual/reference/connection-string/
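+ //
+ // e.g. (hypothetical) mongodb://user:password@mongo.example.com:27017/sharelatex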
+ //
+ // The following works out of the box with Mongo's default settings:
+ mongo: {
+ url: process.env.SHARELATEX_MONGO_URL || 'mongodb://dockerhost/sharelatex',
+ },
+
+ // Redis is used in ShareLaTeX for high volume queries, like real-time
+ // editing, and session management.
+ //
+ // The following config will work with Redis's default settings:
+ redis: {
+ web: (redisConfig = {
+ host: process.env.SHARELATEX_REDIS_HOST || 'dockerhost',
+ port: process.env.SHARELATEX_REDIS_PORT || '6379',
+ password: process.env.SHARELATEX_REDIS_PASS || undefined,
+ key_schema: {
+ // document-updater
+ blockingKey({ doc_id }) {
+ return `Blocking:${doc_id}`
+ },
+ docLines({ doc_id }) {
+ return `doclines:${doc_id}`
+ },
+ docOps({ doc_id }) {
+ return `DocOps:${doc_id}`
+ },
+ docVersion({ doc_id }) {
+ return `DocVersion:${doc_id}`
+ },
+ docHash({ doc_id }) {
+ return `DocHash:${doc_id}`
+ },
+ projectKey({ doc_id }) {
+ return `ProjectId:${doc_id}`
+ },
+ docsInProject({ project_id }) {
+ return `DocsIn:${project_id}`
+ },
+ ranges({ doc_id }) {
+ return `Ranges:${doc_id}`
+ },
+ // document-updater:realtime
+ pendingUpdates({ doc_id }) {
+ return `PendingUpdates:${doc_id}`
+ },
+ // document-updater:history
+ uncompressedHistoryOps({ doc_id }) {
+ return `UncompressedHistoryOps:${doc_id}`
+ },
+ docsWithHistoryOps({ project_id }) {
+ return `DocsWithHistoryOps:${project_id}`
+ },
+ // document-updater:lock reuses blockingKey (defined above)
+ // track-changes:lock
+ historyLock({ doc_id }) {
+ return `HistoryLock:${doc_id}`
+ },
+ historyIndexLock({ project_id }) {
+ return `HistoryIndexLock:${project_id}`
+ },
+ // track-changes:history reuses uncompressedHistoryOps and
+ // docsWithHistoryOps (defined above)
+ // realtime
+ clientsInProject({ project_id }) {
+ return `clients_in_project:${project_id}`
+ },
+ connectedUser({ project_id, client_id }) {
+ return `connected_user:${project_id}:${client_id}`
+ },
+ },
+ }),
+ fairy: redisConfig,
+ // track-changes and document-updater
+ realtime: redisConfig,
+ documentupdater: redisConfig,
+ lock: redisConfig,
+ history: redisConfig,
+ websessions: redisConfig,
+ api: redisConfig,
+ pubsub: redisConfig,
+ project_history: redisConfig,
+ },
+
+ // The compile server (the clsi) uses a SQL database to cache files and
+ // meta-data. sqlite is the default, and the load is low enough that this will
+ // be fine in production (we use sqlite at sharelatex.com).
+ //
+ // If you want to configure a different database, see the Sequelize documentation
+ // for available options:
+ //
+ // https://github.com/sequelize/sequelize/wiki/API-Reference-Sequelize#example-usage
+ //
+ mysql: {
+ clsi: {
+ database: 'clsi',
+ username: 'clsi',
+ password: '',
+ dialect: 'sqlite',
+ storage: Path.join(DATA_DIR, 'db.sqlite'),
+ },
+ },
+
+ // File storage
+ // ------------
+
+ // ShareLaTeX can store binary files like images either locally or in Amazon
+ // S3. The default is locally:
+ filestore: {
+ backend: 'fs',
+ stores: {
+ user_files: Path.join(DATA_DIR, 'user_files'),
+ template_files: Path.join(DATA_DIR, 'template_files'),
+ },
+ },
+
+ // To use Amazon S3 as a storage backend, comment out the above config, and
+ // uncomment the following, filling in your key, secret, and bucket name:
+ //
+ // filestore: {
+ //   backend: 's3',
+ //   stores: {
+ //     user_files: 'BUCKET_NAME',
+ //   },
+ //   s3: {
+ //     key: 'AWS_KEY',
+ //     secret: 'AWS_SECRET',
+ //   },
+ // },
+ //
+
+ trackchanges: {
+ continueOnError: true,
+ },
+
+ // Local disk caching
+ // ------------------
+ path: {
+ // If we ever need to write something to disk (e.g. incoming requests
+ // that need processing but may be too big for memory), then write
+ // them to disk here:
+ dumpFolder: Path.join(TMP_DIR, 'dumpFolder'),
+ // Where to write uploads before they are processed
+ uploadFolder: Path.join(TMP_DIR, 'uploads'),
+ // Where to write the project to disk before running LaTeX on it
+ compilesDir: Path.join(DATA_DIR, 'compiles'),
+ // Where to cache downloaded URLs for the CLSI
+ clsiCacheDir: Path.join(DATA_DIR, 'cache'),
+ // Where to write the output files to disk after running LaTeX
+ outputDir: Path.join(DATA_DIR, 'output'),
+ },
+
+ // Server Config
+ // -------------
+
+ // Where your instance of ShareLaTeX can be found publicly. This is used
+ // when emails are sent out and in generated links:
+ siteUrl: (siteUrl = process.env.SHARELATEX_SITE_URL || 'http://localhost'),
+
+ // The name used to describe this installation of ShareLaTeX.
+ appName: process.env.SHARELATEX_APP_NAME || 'ShareLaTeX (Community Edition)',
+
+ restrictInvitesToExistingAccounts:
+ process.env.SHARELATEX_RESTRICT_INVITES_TO_EXISTING_ACCOUNTS === 'true',
+
+ nav: {
+ title:
+ process.env.SHARELATEX_NAV_TITLE ||
+ process.env.SHARELATEX_APP_NAME ||
+ 'ShareLaTeX Community Edition',
+ },
+
+ // The email address which users will be directed to as the main point of
+ // contact for this installation of ShareLaTeX.
+ adminEmail: process.env.SHARELATEX_ADMIN_EMAIL || 'placeholder@example.com',
+
+ // If provided, a sessionSecret is used to sign cookies so that they cannot be
+ // spoofed. This is recommended.
+ security: {
+ sessionSecret:
+ process.env.SHARELATEX_SESSION_SECRET || process.env.CRYPTO_RANDOM,
+ },
+
+ // These credentials are used for authenticating api requests
+ // between services that may need to go over public channels
+ httpAuthUsers,
+
+ // Whether JavaScript assets should be served minified.
+ useMinifiedJs: true,
+
+ // Should static assets be sent with a header to tell the browser to cache
+ // them. This should be false in development where changes are being made,
+ // but should be set to true in production.
+ cacheStaticAssets: true,
+
+ // If you are running ShareLaTeX over https, set this to true to send the
+ // cookie with a secure flag (recommended).
+ secureCookie: process.env.SHARELATEX_SECURE_COOKIE != null,
+
+ // If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc)
+ // then set this to true to allow it to correctly detect the forwarded IP
+ // address and http/https protocol information.
+ behindProxy: process.env.SHARELATEX_BEHIND_PROXY === 'true',
+
+ i18n: {
+ subdomainLang: {
+ www: {
+ lngCode: process.env.SHARELATEX_SITE_LANGUAGE || 'en',
+ url: siteUrl,
+ },
+ },
+ defaultLng: process.env.SHARELATEX_SITE_LANGUAGE || 'en',
+ },
+
+ currentImageName: process.env.TEX_LIVE_DOCKER_IMAGE,
+
+ apis: {
+ web: {
+ url: 'http://localhost:3000',
+ user: httpAuthUser,
+ pass: httpAuthPass,
+ },
+ project_history: {
+ enabled: false,
+ },
+ },
+ references: {},
+ notifications: undefined,
+
+ defaultFeatures: {
+ collaborators: -1,
+ dropbox: true,
+ versioning: true,
+ compileTimeout: parseIntOrFail(process.env.COMPILE_TIMEOUT || 180),
+ compileGroup: 'standard',
+ trackChanges: true,
+ templates: true,
+ references: true,
+ },
+}
+
+// # OPTIONAL CONFIGURABLE SETTINGS
+
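+// The footer variables are parsed as JSON (the right footer also accepts a
+// plain string); for example (hypothetical value):
+//
+//   SHARELATEX_LEFT_FOOTER='[{"text": "Powered by ShareLaTeX"}]'
+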
+if (process.env.SHARELATEX_LEFT_FOOTER != null) {
+ try {
+ settings.nav.left_footer = JSON.parse(process.env.SHARELATEX_LEFT_FOOTER)
+ } catch (error) {
+ e = error
+ console.error('could not parse SHARELATEX_LEFT_FOOTER, not valid JSON')
+ }
+}
+
+if (process.env.SHARELATEX_RIGHT_FOOTER != null) {
+ settings.nav.right_footer = process.env.SHARELATEX_RIGHT_FOOTER
+ try {
+ settings.nav.right_footer = JSON.parse(process.env.SHARELATEX_RIGHT_FOOTER)
+ } catch (error1) {
+ e = error1
+ console.error('could not parse SHARELATEX_RIGHT_FOOTER, not valid JSON')
+ }
+}
+
+if (process.env.SHARELATEX_HEADER_IMAGE_URL != null) {
+ settings.nav.custom_logo = process.env.SHARELATEX_HEADER_IMAGE_URL
+}
+
+if (process.env.SHARELATEX_HEADER_NAV_LINKS != null) {
+ console.error(`\
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
+#
+# WARNING: SHARELATEX_HEADER_NAV_LINKS is no longer supported
+# See https://github.com/sharelatex/sharelatex/wiki/Configuring-Headers,-Footers-&-Logo
+#
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #\
+`)
+}
+
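+// SHARELATEX_HEADER_EXTRAS is parsed as a JSON array of extra nav entries,
+// e.g. (hypothetical value):
+//
+//   SHARELATEX_HEADER_EXTRAS='[{"text": "Wiki", "url": "https://example.com/wiki"}]'
+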
+if (process.env.SHARELATEX_HEADER_EXTRAS != null) {
+ try {
+ settings.nav.header_extras = JSON.parse(
+ process.env.SHARELATEX_HEADER_EXTRAS
+ )
+ } catch (error2) {
+ e = error2
+ console.error('could not parse SHARELATEX_HEADER_EXTRAS, not valid JSON')
+ }
+}
+
+// Sending Email
+// -------------
+//
+// You must configure a mail server to be able to send invite emails from
+// ShareLaTeX. The config settings are passed to nodemailer. See the nodemailer
+// documentation for available options:
+//
+// http://www.nodemailer.com/docs/transports
+
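+// A minimal SMTP setup might use (hypothetical values):
+//
+//   SHARELATEX_EMAIL_FROM_ADDRESS=sharelatex@example.com
+//   SHARELATEX_EMAIL_SMTP_HOST=smtp.example.com
+//   SHARELATEX_EMAIL_SMTP_PORT=587
+//   SHARELATEX_EMAIL_SMTP_USER=smtp-user
+//   SHARELATEX_EMAIL_SMTP_PASS=smtp-pass
+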
+if (process.env.SHARELATEX_EMAIL_FROM_ADDRESS != null) {
+ settings.email = {
+ fromAddress: process.env.SHARELATEX_EMAIL_FROM_ADDRESS,
+ replyTo: process.env.SHARELATEX_EMAIL_REPLY_TO || '',
+ driver: process.env.SHARELATEX_EMAIL_DRIVER,
+ parameters: {
+ // AWS Creds
+ AWSAccessKeyID: process.env.SHARELATEX_EMAIL_AWS_SES_ACCESS_KEY_ID,
+ AWSSecretKey: process.env.SHARELATEX_EMAIL_AWS_SES_SECRET_KEY,
+
+ // SMTP Creds
+ host: process.env.SHARELATEX_EMAIL_SMTP_HOST,
+ port: process.env.SHARELATEX_EMAIL_SMTP_PORT,
+ secure: parse(process.env.SHARELATEX_EMAIL_SMTP_SECURE),
+ ignoreTLS: parse(process.env.SHARELATEX_EMAIL_SMTP_IGNORE_TLS),
+ name: process.env.SHARELATEX_EMAIL_SMTP_NAME,
+ logger: process.env.SHARELATEX_EMAIL_SMTP_LOGGER === 'true',
+ },
+
+ textEncoding: process.env.SHARELATEX_EMAIL_TEXT_ENCODING,
+ template: {
+ customFooter: process.env.SHARELATEX_CUSTOM_EMAIL_FOOTER,
+ },
+ }
+
+ if (process.env.SHARELATEX_EMAIL_AWS_SES_REGION != null) {
+ settings.email.parameters.region =
+ process.env.SHARELATEX_EMAIL_AWS_SES_REGION
+ }
+
+ if (
+ process.env.SHARELATEX_EMAIL_SMTP_USER != null ||
+ process.env.SHARELATEX_EMAIL_SMTP_PASS != null
+ ) {
+ settings.email.parameters.auth = {
+ user: process.env.SHARELATEX_EMAIL_SMTP_USER,
+ pass: process.env.SHARELATEX_EMAIL_SMTP_PASS,
+ }
+ }
+
+ if (process.env.SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH != null) {
+ settings.email.parameters.tls = {
+ rejectUnauthorized: parse(
+ process.env.SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH
+ ),
+ }
+ }
+}
+
+// i18n
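+// The mapping is JSON keyed by subdomain, mirroring the default above, e.g.
+// (hypothetical value):
+//
+//   SHARELATEX_LANG_DOMAIN_MAPPING='{"www": {"lngCode": "de", "url": "http://www.example.com"}}'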
+if (process.env.SHARELATEX_LANG_DOMAIN_MAPPING != null) {
+ settings.i18n.subdomainLang = parse(
+ process.env.SHARELATEX_LANG_DOMAIN_MAPPING
+ )
+}
+
+// Password Settings
+// -----------
+// These settings restrict the passwords users can choose when registering.
+// The options are from http://antelle.github.io/passfield
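+// In a passfield pattern such as the default 'aA$3', each character appears
+// to denote a required character class (lowercase letter, uppercase letter,
+// symbol, digit); see the passfield docs above.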
+if (
+ process.env.SHARELATEX_PASSWORD_VALIDATION_PATTERN ||
+ process.env.SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH ||
+ process.env.SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH
+) {
+ settings.passwordStrengthOptions = {
+ pattern: process.env.SHARELATEX_PASSWORD_VALIDATION_PATTERN || 'aA$3',
+ length: {
+ min: process.env.SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH || 8,
+ max: process.env.SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH || 150,
+ },
+ }
+}
+
+// ######################
+// ShareLaTeX Server Pro
+// ######################
+
+if (parse(process.env.SHARELATEX_IS_SERVER_PRO) === true) {
+ settings.bypassPercentageRollouts = true
+ settings.apis.references = { url: 'http://localhost:3040' }
+}
+
+// LDAP - SERVER PRO ONLY
+// ----------
+
+if (process.env.SHARELATEX_LDAP_HOST) {
+ console.error(`\
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
+#
+# WARNING: The LDAP configuration format has changed in version 0.5.1
+# See https://github.com/sharelatex/sharelatex/wiki/Server-Pro:-LDAP-Config
+#
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #\
+`)
+}
+
+if (process.env.SHARELATEX_LDAP_URL) {
+ let _ldap_connect_timeout,
+ _ldap_group_search_attribs,
+ _ldap_search_attribs,
+ _ldap_timeout
+ settings.externalAuth = true
+ settings.ldap = {
+ emailAtt: process.env.SHARELATEX_LDAP_EMAIL_ATT,
+ nameAtt: process.env.SHARELATEX_LDAP_NAME_ATT,
+ lastNameAtt: process.env.SHARELATEX_LDAP_LAST_NAME_ATT,
+ updateUserDetailsOnLogin:
+ process.env.SHARELATEX_LDAP_UPDATE_USER_DETAILS_ON_LOGIN === 'true',
+ placeholder: process.env.SHARELATEX_LDAP_PLACEHOLDER,
+ server: {
+ url: process.env.SHARELATEX_LDAP_URL,
+ bindDn: process.env.SHARELATEX_LDAP_BIND_DN,
+ bindCredentials: process.env.SHARELATEX_LDAP_BIND_CREDENTIALS,
+ bindProperty: process.env.SHARELATEX_LDAP_BIND_PROPERTY,
+ searchBase: process.env.SHARELATEX_LDAP_SEARCH_BASE,
+ searchScope: process.env.SHARELATEX_LDAP_SEARCH_SCOPE,
+ searchFilter: process.env.SHARELATEX_LDAP_SEARCH_FILTER,
+ searchAttributes: (_ldap_search_attribs =
+ process.env.SHARELATEX_LDAP_SEARCH_ATTRIBUTES)
+ ? (() => {
+ try {
+ return JSON.parse(_ldap_search_attribs)
+ } catch (error3) {
+ e = error3
+ return console.error(
+ 'could not parse SHARELATEX_LDAP_SEARCH_ATTRIBUTES'
+ )
+ }
+ })()
+ : undefined,
+ groupDnProperty: process.env.SHARELATEX_LDAP_GROUP_DN_PROPERTY,
+ groupSearchBase: process.env.SHARELATEX_LDAP_GROUP_SEARCH_BASE,
+ groupSearchScope: process.env.SHARELATEX_LDAP_GROUP_SEARCH_SCOPE,
+ groupSearchFilter: process.env.SHARELATEX_LDAP_GROUP_SEARCH_FILTER,
+ groupSearchAttributes: (_ldap_group_search_attribs =
+ process.env.SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES)
+ ? (() => {
+ try {
+ return JSON.parse(_ldap_group_search_attribs)
+ } catch (error4) {
+ e = error4
+ return console.error(
+ 'could not parse SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES'
+ )
+ }
+ })()
+ : undefined,
+ cache: process.env.SHARELATEX_LDAP_CACHE === 'true',
+ timeout: (_ldap_timeout = process.env.SHARELATEX_LDAP_TIMEOUT)
+ ? (() => {
+ try {
+ return parseIntOrFail(_ldap_timeout)
+ } catch (error5) {
+ e = error5
+ return console.error('Cannot parse SHARELATEX_LDAP_TIMEOUT')
+ }
+ })()
+ : undefined,
+ connectTimeout: (_ldap_connect_timeout =
+ process.env.SHARELATEX_LDAP_CONNECT_TIMEOUT)
+ ? (() => {
+ try {
+ return parseIntOrFail(_ldap_connect_timeout)
+ } catch (error6) {
+ e = error6
+ return console.error(
+ 'Cannot parse SHARELATEX_LDAP_CONNECT_TIMEOUT'
+ )
+ }
+ })()
+ : undefined,
+ },
+ }
+
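+ // The *_SEARCH_ATTRIBUTES variables above are parsed as JSON arrays of LDAP
+ // attribute names, e.g. (hypothetical value):
+ //
+ //   SHARELATEX_LDAP_SEARCH_ATTRIBUTES='["uid", "mail", "cn"]'
+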
+ if (process.env.SHARELATEX_LDAP_TLS_OPTS_CA_PATH) {
+ let ca, ca_paths
+ try {
+ ca = JSON.parse(process.env.SHARELATEX_LDAP_TLS_OPTS_CA_PATH)
+ } catch (error7) {
+ e = error7
+ console.error(
+ 'could not parse SHARELATEX_LDAP_TLS_OPTS_CA_PATH, invalid JSON'
+ )
+ }
+
+ if (typeof ca === 'string') {
+ ca_paths = [ca]
+ } else if (
+ typeof ca === 'object' &&
+ (ca != null ? ca.length : undefined) != null
+ ) {
+ ca_paths = ca
+ } else {
+ console.error('problem parsing SHARELATEX_LDAP_TLS_OPTS_CA_PATH')
+ }
+
+ settings.ldap.server.tlsOptions = {
+ rejectUnauthorized:
+ process.env.SHARELATEX_LDAP_TLS_OPTS_REJECT_UNAUTH === 'true',
+ ca: ca_paths, // parsed from JSON, e.g. '["/etc/ldap/ca_certs.pem"]'
+ }
+ }
+}
+
+if (process.env.SHARELATEX_SAML_ENTRYPOINT) {
+ // NOTE: see https://github.com/node-saml/passport-saml/blob/master/README.md for docs of `server` options
+ let _saml_additionalAuthorizeParams,
+ _saml_additionalLogoutParams,
+ _saml_additionalParams,
+ _saml_expiration,
+ _saml_skew
+ settings.externalAuth = true
+ settings.saml = {
+ updateUserDetailsOnLogin:
+ process.env.SHARELATEX_SAML_UPDATE_USER_DETAILS_ON_LOGIN === 'true',
+ identityServiceName: process.env.SHARELATEX_SAML_IDENTITY_SERVICE_NAME,
+ emailField:
+ process.env.SHARELATEX_SAML_EMAIL_FIELD ||
+ process.env.SHARELATEX_SAML_EMAIL_FIELD_NAME,
+ firstNameField: process.env.SHARELATEX_SAML_FIRST_NAME_FIELD,
+ lastNameField: process.env.SHARELATEX_SAML_LAST_NAME_FIELD,
+ server: {
+ // strings
+ entryPoint: process.env.SHARELATEX_SAML_ENTRYPOINT,
+ callbackUrl: process.env.SHARELATEX_SAML_CALLBACK_URL,
+ issuer: process.env.SHARELATEX_SAML_ISSUER,
+ decryptionPvk: process.env.SHARELATEX_SAML_DECRYPTION_PVK,
+ decryptionCert: process.env.SHARELATEX_SAML_DECRYPTION_CERT,
+ signatureAlgorithm: process.env.SHARELATEX_SAML_SIGNATURE_ALGORITHM,
+ identifierFormat: process.env.SHARELATEX_SAML_IDENTIFIER_FORMAT,
+ attributeConsumingServiceIndex:
+ process.env.SHARELATEX_SAML_ATTRIBUTE_CONSUMING_SERVICE_INDEX,
+ authnContext: process.env.SHARELATEX_SAML_AUTHN_CONTEXT,
+ authnRequestBinding: process.env.SHARELATEX_SAML_AUTHN_REQUEST_BINDING,
+ validateInResponseTo: process.env.SHARELATEX_SAML_VALIDATE_IN_RESPONSE_TO,
+ cacheProvider: process.env.SHARELATEX_SAML_CACHE_PROVIDER,
+ logoutUrl: process.env.SHARELATEX_SAML_LOGOUT_URL,
+ logoutCallbackUrl: process.env.SHARELATEX_SAML_LOGOUT_CALLBACK_URL,
+ disableRequestedAuthnContext:
+ process.env.SHARELATEX_SAML_DISABLE_REQUESTED_AUTHN_CONTEXT === 'true',
+ forceAuthn: process.env.SHARELATEX_SAML_FORCE_AUTHN === 'true',
+ skipRequestCompression:
+ process.env.SHARELATEX_SAML_SKIP_REQUEST_COMPRESSION === 'true',
+ acceptedClockSkewMs: (_saml_skew =
+ process.env.SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS)
+ ? (() => {
+ try {
+ return parseIntOrFail(_saml_skew)
+ } catch (error8) {
+ e = error8
+ return console.error(
+ 'Cannot parse SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS'
+ )
+ }
+ })()
+ : undefined,
+ requestIdExpirationPeriodMs: (_saml_expiration =
+ process.env.SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS)
+ ? (() => {
+ try {
+ return parseIntOrFail(_saml_expiration)
+ } catch (error9) {
+ e = error9
+ return console.error(
+ 'Cannot parse SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS'
+ )
+ }
+ })()
+ : undefined,
+ additionalParams: (_saml_additionalParams =
+ process.env.SHARELATEX_SAML_ADDITIONAL_PARAMS)
+ ? (() => {
+ try {
+ return JSON.parse(_saml_additionalParams)
+ } catch (error10) {
+ e = error10
+ return console.error(
+ 'Cannot parse SHARELATEX_SAML_ADDITIONAL_PARAMS'
+ )
+ }
+ })()
+ : undefined,
+ additionalAuthorizeParams: (_saml_additionalAuthorizeParams =
+ process.env.SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS)
+ ? (() => {
+ try {
+ return JSON.parse(_saml_additionalAuthorizeParams)
+ } catch (error11) {
+ e = error11
+ return console.error(
+ 'Cannot parse SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS'
+ )
+ }
+ })()
+ : undefined,
+ additionalLogoutParams: (_saml_additionalLogoutParams =
+ process.env.SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS)
+ ? (() => {
+ try {
+ return JSON.parse(_saml_additionalLogoutParams)
+ } catch (error12) {
+ e = error12
+ return console.error(
+ 'Cannot parse SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS'
+ )
+ }
+ })()
+ : undefined,
+ },
+ }
+
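+ // The *_ADDITIONAL_*_PARAMS variables above are parsed as JSON objects of
+ // extra key/value pairs, e.g. (hypothetical value):
+ //
+ //   SHARELATEX_SAML_ADDITIONAL_PARAMS='{"RelayState": "custom-state"}'
+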
+ // SHARELATEX_SAML_CERT cannot be empty
+ // https://github.com/node-saml/passport-saml/commit/f6b1c885c0717f1083c664345556b535f217c102
+ if (process.env.SHARELATEX_SAML_CERT) {
+ settings.saml.server.cert = process.env.SHARELATEX_SAML_CERT
+ settings.saml.server.privateKey = process.env.SHARELATEX_SAML_PRIVATE_CERT
+ }
+}
+
+// Compiler
+// --------
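+// For example, sandboxed compiles with sibling containers might be enabled
+// with (hypothetical values):
+//
+//   SANDBOXED_COMPILES=true
+//   SANDBOXED_COMPILES_SIBLING_CONTAINERS=true
+//   SANDBOXED_COMPILES_HOST_DIR=/var/sharelatex_data/data/compiles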
+if (process.env.SANDBOXED_COMPILES === 'true') {
+ settings.clsi = {
+ dockerRunner: true,
+ docker: {
+ image: process.env.TEX_LIVE_DOCKER_IMAGE,
+ env: {
+ HOME: '/tmp',
+ PATH:
+ process.env.COMPILER_PATH ||
+ '/usr/local/texlive/2015/bin/x86_64-linux:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
+ },
+ user: 'www-data',
+ },
+ }
+
+ if (settings.path == null) {
+ settings.path = {}
+ }
+ settings.path.synctexBaseDir = () => '/compile'
+ if (process.env.SANDBOXED_COMPILES_SIBLING_CONTAINERS === 'true') {
+ console.log('Using sibling containers for sandboxed compiles')
+ if (process.env.SANDBOXED_COMPILES_HOST_DIR) {
+ settings.path.sandboxedCompilesHostDir =
+ process.env.SANDBOXED_COMPILES_HOST_DIR
+ } else {
+ console.error(
+ 'Sibling containers, but SANDBOXED_COMPILES_HOST_DIR not set'
+ )
+ }
+ }
+}
+
+// Templates
+// ---------
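+// SHARELATEX_NEW_PROJECT_TEMPLATE_LINKS is parsed as JSON, e.g. (hypothetical
+// value):
+//
+//   SHARELATEX_NEW_PROJECT_TEMPLATE_LINKS='[{"name": "All Templates", "url": "/templates/all"}]'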
+if (process.env.SHARELATEX_TEMPLATES_USER_ID) {
+ settings.templates = {
+ mountPointUrl: '/templates',
+ user_id: process.env.SHARELATEX_TEMPLATES_USER_ID,
+ }
+
+ settings.templateLinks = parse(
+ process.env.SHARELATEX_NEW_PROJECT_TEMPLATE_LINKS
+ )
+}
+
+// /Learn
+// -------
+if (process.env.SHARELATEX_PROXY_LEARN != null) {
+ settings.proxyLearn = parse(process.env.SHARELATEX_PROXY_LEARN)
+}
+
+// /References
+// -----------
+if (process.env.SHARELATEX_ELASTICSEARCH_URL != null) {
+ settings.references.elasticsearch = {
+ host: process.env.SHARELATEX_ELASTICSEARCH_URL,
+ }
+}
+
+// TeX Live Images
+// -----------
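+// Both variables are comma-separated lists, with names matched to images by
+// position, e.g. (hypothetical values):
+//
+//   ALL_TEX_LIVE_DOCKER_IMAGES=quay.io/sharelatex/texlive-full:2017.1,quay.io/sharelatex/texlive-full:2016.1
+//   ALL_TEX_LIVE_DOCKER_IMAGE_NAMES=TeXLive 2017.1,TeXLive 2016.1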
+if (process.env.ALL_TEX_LIVE_DOCKER_IMAGES != null) {
+ allTexLiveDockerImages = process.env.ALL_TEX_LIVE_DOCKER_IMAGES.split(',')
+}
+if (process.env.ALL_TEX_LIVE_DOCKER_IMAGE_NAMES != null) {
+ allTexLiveDockerImageNames =
+ process.env.ALL_TEX_LIVE_DOCKER_IMAGE_NAMES.split(',')
+}
+if (allTexLiveDockerImages != null) {
+ settings.allowedImageNames = []
+ for (let index = 0; index < allTexLiveDockerImages.length; index++) {
+ const fullImageName = allTexLiveDockerImages[index]
+ const imageName = Path.basename(fullImageName)
+ const imageDesc =
+ allTexLiveDockerImageNames != null
+ ? allTexLiveDockerImageNames[index]
+ : imageName
+ settings.allowedImageNames.push({ imageName, imageDesc })
+ }
+}
+
+// With lots of incoming and outgoing HTTP connections to different services,
+// sometimes long running, it is a good idea to increase the default number
+// of sockets that Node will hold open.
+const http = require('http')
+http.globalAgent.maxSockets = 300
+const https = require('https')
+https.globalAgent.maxSockets = 300
+
+module.exports = settings
diff --git a/tasks/CreateAndDestoryUsers.coffee b/tasks/CreateAndDestoryUsers.coffee
deleted file mode 100644
index 14ed4ca360..0000000000
--- a/tasks/CreateAndDestoryUsers.coffee
+++ /dev/null
@@ -1,56 +0,0 @@
-
-module.exports = (grunt) ->
-
- grunt.registerTask 'user:create-admin', "Create a user with the given email address and make them an admin. Update in place if the user already exists. Usage: grunt user:create-admin --email joe@example.com", () ->
- done = @async()
- email = grunt.option("email")
- if !email?
- console.error "Usage: grunt user:create-admin --email=joe@example.com"
- process.exit(1)
-
- settings = require "settings-sharelatex"
- UserRegistrationHandler = require "../web/app/src/Features/User/UserRegistrationHandler"
- OneTimeTokenHandler = require "../web/app/src/Features/Security/OneTimeTokenHandler"
- UserRegistrationHandler.registerNewUser {
- email: email
- password: require("crypto").randomBytes(32).toString("hex")
- }, (error, user) ->
- if error? and error?.message != "EmailAlreadyRegistered"
- throw error
- user.isAdmin = true
- user.save (error) ->
- throw error if error?
- ONE_WEEK = 7 * 24 * 60 * 60 # seconds
- OneTimeTokenHandler.getNewToken "password", { expiresIn: ONE_WEEK, email:user.email, user_id: user._id.toString() }, (err, token)->
- return next(err) if err?
-
- console.log ""
- console.log """
- Successfully created #{email} as an admin user.
-
- Please visit the following URL to set a password for #{email} and log in:
-
- #{settings.siteUrl}/user/password/set?passwordResetToken=#{token}
-
- """
- done()
-
- grunt.registerTask 'user:delete', "deletes a user and all their data, Usage: grunt user:delete --email joe@example.com", () ->
- done = @async()
- email = grunt.option("email")
- if !email?
- console.error "Usage: grunt user:delete --email=joe@example.com"
- process.exit(1)
- settings = require "settings-sharelatex"
- UserGetter = require "../web/app/src/Features/User/UserGetter"
- UserDeleter = require "../web/app/src/Features/User/UserDeleter"
- UserGetter.getUser email:email, (error, user) ->
- if error?
- throw error
- if !user?
- console.log("user #{email} not in database, potentially already deleted")
- return done()
- UserDeleter.deleteUser user._id, (err)->
- if err?
- throw err
- done()
diff --git a/tasks/ProjectSize.coffee b/tasks/ProjectSize.coffee
deleted file mode 100644
index 1d02d33d26..0000000000
--- a/tasks/ProjectSize.coffee
+++ /dev/null
@@ -1,24 +0,0 @@
-# require("coffee-script")
-
-# fs = require("fs")
-# _ = require("underscore")
-
-# if not process.argv[2]
-# console.log "Usage: coffee project_size.coffee user_files_path"
-# else
-# dirPath = process.argv[2]
-# if not fs.lstatSync(dirPath).isDirectory()
-# console.log dirPath + " directory not exist"
-# else
-# fs.readdir dirPath, (err, files)->
-# projects = []
-# files.forEach (file)->
-# project_id = file.split("_")[0]
-# if !projects[project_id]
-# projects[project_id] = 0
-# projects[project_id] += fs.lstatSync(dirPath+"/"+file).size
-
-# ids = _.keys projects
-# console.log "project \t size"
-# ids.forEach (id)->
-# console.log id + "\t" + projects[id]
diff --git a/vendor/envsubst b/vendor/envsubst
new file mode 100644
index 0000000000..f7ad8081d0
Binary files /dev/null and b/vendor/envsubst differ