diff --git a/.devcontainer/Dockerfile b/.devcontainer/devops/Dockerfile similarity index 100% rename from .devcontainer/Dockerfile rename to .devcontainer/devops/Dockerfile diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devops/devcontainer.json similarity index 94% rename from .devcontainer/devcontainer.json rename to .devcontainer/devops/devcontainer.json index 5f850dc2..ad6788aa 100755 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devops/devcontainer.json @@ -1,5 +1,5 @@ { - "name": "NF-NEURO development container", + "name": "devops", "build": { "dockerfile": "Dockerfile", "args": { @@ -8,8 +8,8 @@ } }, "forwardPorts": [3000], - "onCreateCommand": "bash .devcontainer/onCreateCommand.sh", - "updateContentCommand": "bash .devcontainer/updateContentCommand.sh", + "onCreateCommand": "bash .devcontainer/devops/onCreateCommand.sh", + "updateContentCommand": "bash .devcontainer/devops/updateContentCommand.sh", "postStartCommand": "git config --global --add safe.directory ${containerWorkspaceFolder}", "containerEnv": { "WORKSPACE": "${containerWorkspaceFolder}" diff --git a/.devcontainer/onCreateCommand.sh b/.devcontainer/devops/onCreateCommand.sh similarity index 100% rename from .devcontainer/onCreateCommand.sh rename to .devcontainer/devops/onCreateCommand.sh diff --git a/.devcontainer/updateContentCommand.sh b/.devcontainer/devops/updateContentCommand.sh similarity index 100% rename from .devcontainer/updateContentCommand.sh rename to .devcontainer/devops/updateContentCommand.sh diff --git a/.devcontainer/prototyping/Dockerfile b/.devcontainer/prototyping/Dockerfile new file mode 100755 index 00000000..f39e63ce --- /dev/null +++ b/.devcontainer/prototyping/Dockerfile @@ -0,0 +1,17 @@ +FROM scilus/scilus:2.0.2 + +ARG POETRY_VERSION + +ENV POETRY_VERSION=${POETRY_VERSION:-1.8.*} + +RUN apt update && apt install -y \ + curl \ + git \ + openjdk-17-jre \ + python3-venv \ + wget \ + && rm -rf /var/lib/apt/lists/* + +RUN python3 -m pip install 
pipx && \ + python3 -m pipx ensurepath && \ + pipx install poetry==${POETRY_VERSION} diff --git a/.devcontainer/prototyping/devcontainer.json b/.devcontainer/prototyping/devcontainer.json new file mode 100755 index 00000000..ccdc64e6 --- /dev/null +++ b/.devcontainer/prototyping/devcontainer.json @@ -0,0 +1,77 @@ +{ + "name": "prototyping", + "build": { + "dockerfile": "Dockerfile", + "args": { + "POETRY_VERSION": "1.8.*" + } + }, + "forwardPorts": [3000], + "initializeCommand": "bash .devcontainer/prototyping/initializeCommand.sh", + "onCreateCommand": "bash .devcontainer/prototyping/onCreateCommand.sh", + "updateContentCommand": "bash .devcontainer/prototyping/updateContentCommand.sh", + "postStartCommand": "git config --global --add safe.directory ${containerWorkspaceFolder}", + "containerEnv": { + "WORKSPACE": "${containerWorkspaceFolder}" + }, + "features": { + "ghcr.io/devcontainers/features/git:1": {}, + "ghcr.io/devcontainers/features/github-cli:1": {}, + "ghcr.io/devcontainers/features/docker-in-docker:2": {}, + "ghcr.io/devcontainers-contrib/features/apt-get-packages:1": {}, + "ghcr.io/robsyme/features/nextflow:1": {}, + "ghcr.io/devcontainers-contrib/features/curl-apt-get:1": {}, + "ghcr.io/devcontainers-contrib/features/tmux-apt-get:1": {}, + "ghcr.io/devcontainers-contrib/features/wget-apt-get:1": {} + }, + "mounts": [ + { + "source": "nf-proto-${devcontainerId}-venv", + "target": "${containerWorkspaceFolder}/.venv", + "type": "volume" + }, + { + "target": "/tmp", + "type": "volume" + }, + { + "source": "nf-proto-${devcontainerId}-profile", + "target": "/root", + "type": "volume" + }, + { + "target": "/root/.vscode-server", + "type": "volume" + } + ], + "customizations": { + "vscode": { + "settings": { + "python": { + "analysis.exclude": [ + "${containerWorkspaceFolder}/.vscode", + "${containerWorkspaceFolder}/.venv", + "**/__pycache__", + "${containerWorkspaceFolder}/.git" + ], + "analysis.ignore": [ + "${containerWorkspaceFolder}/.vscode", + 
"${containerWorkspaceFolder}/.venv", + "**/__pycache__", + "${containerWorkspaceFolder}/.git" + ], + "createEnvironment.trigger": "off", + "interpreter.infoVisibility": "always", + "poetryPath": "/root/.local/bin/poetry", + "defaultInterpreterPath": "${containerWorkspaceFolder}/.venv", + "terminal.activateEnvironment": true, + "terminal.activateEnvInCurrentTerminal": true, + "terminal.focusAfterLaunch": true + } + }, + "extensions": ["nf-neuro.nf-neuro-extensionpack"] + } + }, + "init": true, + "privileged": true +} diff --git a/.devcontainer/prototyping/initializeCommand.sh b/.devcontainer/prototyping/initializeCommand.sh new file mode 100644 index 00000000..fbaa9a89 --- /dev/null +++ b/.devcontainer/prototyping/initializeCommand.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash + +# The prototyping environment must not be started in the nf-neuro repository. + +if [[ $PWD =~ .*/nf-neuro$ ]] +then + +echo "You cannot open a prototyping environment in the nf-neuro repository." +echo "Please, locate yourself elsewhere, outside the nf-neuro tree if possible." + +exit 1 + +fi diff --git a/.devcontainer/prototyping/onCreateCommand.sh b/.devcontainer/prototyping/onCreateCommand.sh new file mode 100755 index 00000000..460045c4 --- /dev/null +++ b/.devcontainer/prototyping/onCreateCommand.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash + +# Setup for NF-CORE + +mkdir -p $XDG_CONFIG_HOME/nf-neuro +touch $XDG_CONFIG_HOME/nf-neuro/.env +echo "source $XDG_CONFIG_HOME/nf-neuro/.env" >> ~/.bashrc + +# Try to download nf-neuro setup with poetry. If it fails, we defer to pip +{ + NFNEURO_RAW_REPOSITORY=https://raw.githubusercontent.com/scilus/nf-neuro/main + NFCORE_VERSION=2.14.1 + wget -N $NFNEURO_RAW_REPOSITORY/pyproject.toml \ + $NFNEURO_RAW_REPOSITORY/poetry.toml \ + $NFNEURO_RAW_REPOSITORY/poetry.lock +} || { + echo "Failed to download nf-neuro base project configuration. 
Creating requirements.txt for pip" + echo "nf-core==$NFCORE_VERSION" > requirements.txt +} + +# Try to download VSCode settings from nf-neuro +{ + NFNEURO_RAW_REPOSITORY=https://raw.githubusercontent.com/scilus/nf-neuro/main + mkdir -p .vscode + wget -N -P .vscode $NFNEURO_RAW_REPOSITORY/.vscode/settings.json +} || { + echo "Could not fetch default extension settings from nf-neuro" +} + +# Initial setup for a pipeline prototyping environment +# - nf-core requires a .nf-core.yml file present, else it bugs out (issue nfcore/tools#3340) +touch .nf-core.yml diff --git a/.devcontainer/prototyping/updateContentCommand.sh b/.devcontainer/prototyping/updateContentCommand.sh new file mode 100644 index 00000000..8a3e9f98 --- /dev/null +++ b/.devcontainer/prototyping/updateContentCommand.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env bash + + +maxmem=$(grep MemTotal /proc/meminfo | awk '{print $2}') + +cat <<EOF > $XDG_CONFIG_HOME/nf-neuro/.env +# This file is used to store environment variables for the project. +# It is sourced by the shell on startup of every terminal. 
+ +export PROFILE=docker +export NFCORE_MODULES_GIT_REMOTE=https://github.com/scilus/nf-neuro.git +export NFCORE_MODULES_BRANCH=main +export NFCORE_SUBWORKFLOWS_GIT_REMOTE=https://github.com/scilus/nf-neuro.git +export NFCORE_SUBWORKFLOWS_BRANCH=main + +export DEVCONTAINER_RAM_LIMIT_GB=$((maxmem / 1024 / 1024)) +export DEVCONTAINER_CPU_LIMIT=$(grep -c ^processor /proc/cpuinfo) + +EOF + +unset maxmem +NFCORE_VERSION=2.14.1 + +if [ -f poetry.lock ] && [ -f pyproject.toml ] +then + poetry install --no-root +elif [ -f requirements.txt ] +then + python3 -m pip install -r requirements.txt +else + echo "No requirements file found, installing nf-core to version $NFCORE_VERSION using pip" + python3 -m pip install nf-core==$NFCORE_VERSION +fi diff --git a/.vscode/extension-package/CHANGELOG.md b/.vscode/extension-package/CHANGELOG.md index a0c5478d..112a60a7 100644 --- a/.vscode/extension-package/CHANGELOG.md +++ b/.vscode/extension-package/CHANGELOG.md @@ -1,5 +1,9 @@ # Change Log +## [1.0.2] - 2024-12-10 + +- Add `errorlens` to packages + ## [1.0.1] - 2024-09-16 - Upgrade extension list diff --git a/.vscode/extension-package/package-lock.json b/.vscode/extension-package/package-lock.json index 041d7818..f43d13d9 100644 --- a/.vscode/extension-package/package-lock.json +++ b/.vscode/extension-package/package-lock.json @@ -1,14 +1,14 @@ { "name": "nf-neuro-extensionpack", - "version": "1.0.0", + "version": "1.0.2", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "nf-neuro-extensionpack", - "version": "1.0.0", + "version": "1.0.2", "engines": { - "vscode": "^1.84.0" + "vscode": "^1.95.0" } } } diff --git a/.vscode/extension-package/package.json b/.vscode/extension-package/package.json index 2e35a9b5..d82f1cba 100644 --- a/.vscode/extension-package/package.json +++ b/.vscode/extension-package/package.json @@ -7,10 +7,10 @@ "url": "https://github.com/scilus/nf-neuro.git" }, "icon": "icon.avif", - "version": "1.0.1", + "version": "1.0.2", "publisher": 
"nf-neuro", "engines": { - "vscode": "^1.84.0" + "vscode": "^1.95.0" }, "categories": [ "Extension Packs" @@ -50,6 +50,7 @@ "ryu1kn.annotator", "trond-snekvik.simple-rst", "twxs.cmake", + "usernamehw.errorlens", "yahyabatulu.vscode-markdown-alert", "yzhang.markdown-all-in-one" ] diff --git a/README.md b/README.md index 56408d8e..3cf0d771 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ primary focus of the library is to provide pre-built processes and processing se technologies and made easily available to pipeline's developers through the `nf-core` framework. -# WHY ? `nf-neuro` +# WHY ? `nf-neuro` **Let's say you develop a pipeline for neuroimaging**. You want to make it the more _efficient,_ _reliable, reproducible_ and also be able to _evaluate it_ and _control the quality_ of its outputs. @@ -30,212 +30,106 @@ closely aligned with [nf-core](), but we'll make you adapt to them slowly as you haven't finished complying to all of them ourselves). Using `nf-neuro` helps accelerate development in **neuroimaging** and produces better research outcomes for all ! +# Where do I start ? + +Well, it depends on what you want to do. If you want to : + +- **Learn about the modules and subworkflows** in `nf-neuro`, go to the [discovery](#discovering-nf-neuro) section. +- **Create a new pipeline quickly**, using **modules** and **subworkflows** from `nf-neuro`, go to the + [prototyping](./docs/PROTOTYPING.md) section. +- **Create or publish a production pipeline** branded with `nf-neuro`, go to the + [porting prototypes](./docs/PRODUCTION.md) section. +- **Contribute new modules and subworkflows** to `nf-neuro`, go to the + [contribution](#contributing-to-the-nf-neuro-project) section. 
+ --- -- [Using modules from `nf-neuro`](#using-modules-from-nf-neuro) -- [Developing in `nf-neuro`](#developing-in-nf-neuro) - - [Manual configuration](#manual-configuration) - - [Dependencies](#dependencies) - - [Python environment](#python-environment) - - [Loading the project's environment](#loading-the-projects-environment) - - [Global environment](#global-environment) - - [Working with VS Code](#working-with-vs-code) - - [Configuration via the `devcontainer`](#configuration-via-the-devcontainer) +- [Discovering `nf-neuro`](#discovering-nf-neuro) + - [Getting info on components from `nf-neuro`](#getting-info-on-components-from-nf-neuro) +- [Pipeline creation with `nf-neuro`](#pipeline-creation-with-nf-neuro) + - [Prototyping using components from `nf-neuro`](#prototyping-using-components-from-nf-neuro) + - [Porting prototypes to `nf-` ready pipelines](#porting-prototypes-to-nf--ready-pipelines) - [Contributing to the `nf-neuro` project](#contributing-to-the-nf-neuro-project) - - [Adding a new module to nf-neuro](./docs/MODULE.md#adding-a-new-module-to-nf-neuro) - - [Generate the template](./docs/MODULE.md#generate-the-template) - - [Edit the template](./docs/MODULE.md#edit-the-template) - - [Edit `main.nf`](./docs/MODULE.md#edit-mainnf) - - [Edit `environment.yml`](./docs/MODULE.md#edit-environmentyml) - - [Edit `meta.yml`](./docs/MODULE.md#edit-metayml) - - [Create test cases](./docs/MODULE.md#create-test-cases) - - [Edit `tests/main.nf.test`](./docs/MODULE.md#edit-testsmainnftest) - - [Edit `tests/nextflow.config`](./docs/MODULE.md#edit-testsnextflowconfig) - - [Generate tests snapshots](./docs/MODULE.md#generate-tests-snapshots) - - [Request for more test resources](./docs/MODULE.md#request-for-more-test-resources) - - [Lint your code](./docs/MODULE.md#lint-your-code) - - [Submit your PR](./docs/MODULE.md#submit-your-pr) - - [Defining optional input parameters](./docs/MODULE.md#defining-optional-input-parameters) - - [Test data 
infrastructure](./docs/MODULE.md#test-data-infrastructure) - - [Adding a new subworkflow to nf-neuro](./docs/SUBWORKFLOWS.md#adding-a-new-subworkflow-to-nf-neuro) - - [Generate the template](./docs/SUBWORKFLOWS.md#generate-the-template) - - [Edit the template](./docs/SUBWORKFLOWS.md#edit-the-template) - - [Edit `main.nf`](./docs/SUBWORKFLOWS.md#edit-mainnf) - - [Define your subworkflow inputs](./docs/SUBWORKFLOWS.md#define-your-subworkflow-inputs) - - [Fill the `main:` section](./docs/SUBWORKFLOWS.md#fill-the-main-section) - - [Define your Workflow outputs](./docs/SUBWORKFLOWS.md#define-your-workflow-outputs) - - [Edit `meta.yml`](./docs/SUBWORKFLOWS.md#edit-metayml) - - [Create test cases](./docs/SUBWORKFLOWS.md#create-test-cases) - - [Lint your code](./docs/SUBWORKFLOWS.md#lint-your-code) - - [Submit your PR](./docs/SUBWORKFLOWS.md#submit-your-pr) - [Running tests](#running-tests) -- [Configuring Docker for easy usage](#configuring-docker-for-easy-usage) -- [Installing Prettier and editorconfig](#installing-prettier-and-editorconfig) --- -# Using modules from `nf-neuro` - -To import modules from `nf-neuro`, you first need to install [nf-core](https://github.com/nf-core/tools) -on your system (can be done simply using `pip install nf-core`). Once done, `nf-neuro` -modules are imported using this command : - -```bash -nf-core modules --git-remote https://github.com/scilus/nf-neuro.git install / -``` - -where you input the `` you want to import from the desired ``. To get -a list of the available modules, run : +# Discovering `nf-neuro` -```bash -nf-core modules --git-remote https://github.com/scilus/nf-neuro.git list remote -``` +To get information on `nf-neuro` components, you'll first need to install `python` and `nf-core`. We provide +extensive guidelines to do it in [this guide](./docs/environment/NFCORE.md). 
-The same can be done for subworkflows, replacing `modules` in the `nf-core` command by `subworkflows : - -```bash -nf-core subworkflows --git-remote https://github.com/scilus/nf-neuro.git install / -``` +## Getting info on components from `nf-neuro` -It can become heavy to always prepend the commands with `--git-remote`, more so if you need to specify a `--branch` where to fetch the information. You can instead define the `git-remote` and `branch` using _Environment Variables_ : +With your environment ready, you can list `nf-neuro` modules available with a simple command : ```bash -export NFCORE_MODULES_GIT_REMOTE=https://github.com/scilus/nf-neuro.git -export NFCORE_MODULES_GIT_BRANCH=main -export NFCORE_SUBWORKFLOWS_GIT_REMOTE=https://github.com/scilus/nf-neuro.git -export NFCORE_SUBWORKFLOWS_GIT_BRANCH=main +nf-core modules list remote ``` -and call all commands without specifying the `--git-remote` and `--branch` options, while still targeting the `nf-neuro` repository. - -# Developing in `nf-neuro` - -The `nf-neuro` project requires some specific tools to be installed on your system so that the development environment runs correctly. You can [install them manually](#manual-configuration), but if you desire to streamline the process and start coding faster, we highly recommend using the [VS Code development container](#configuration-via-the-devcontainer) to get fully configured in a matter of minutes. - -## Manual configuration - -### Dependencies - -- Python ≥ 3.8, < 3.13 -- Docker ≥ 24 (we recommend using [Docker Desktop](https://www.docker.com/products/docker-desktop)) -- Java Runtime ≥ 11, ≤ 17 - - On Ubuntu, install `openjdk-jre-` packages -- Nextflow ≥ 23.04.0 -- nf-test ≥ 0.9.0 -- Node ≥ 14, `Prettier` and `editorconfig` (see [below](#installing-prettier-and-editorconfig)) +

+ nf-core modules list remote +

-> [!IMPORTANT] -> Nextflow might not detect the right `Java virtual machine` by default, more so if -> multiple versions of the runtime are installed. If so, you need to set the environment -> variable `JAVA_HOME` to target the right one. -> -> - Linux : look in `/usr/lib/jvm` for -> a folder named `java--openjdk-` and use it as `JAVA_HOME`. -> - MacOS : if the `Java jvm` is the preferential one, use `JAVA_HOME=$(/usr/libexec/java_home)`. -> Else, look into `/Library/Java/JavaVirtualMachines` for the folder with the correct -> runtime version (named `jdk_1.jdk`) and use the -> following : `/Library/Java/JavaVirtualMachines/jdk_1.jdk/Contents/Home`. - -### Python environment - -The project uses _poetry_ to manage python dependencies. To install it using pipx, -run the following commands : +The same goes for `subworkflows` : ```bash -pip install pipx -pipx ensurepath -pipx install poetry==1.8.* +nf-core subworkflows list remote ``` -> [!NOTE] -> If the second command above fails, `pipx` cannot be found in the path. Prepend the -> second command with `$(which python) -m` and rerun the whole block. - -> [!WARNING] -> Poetry doesn't like when other python environments are activated around it. Make -> sure to deactivate any before calling `poetry` commands. +

+ nf-core subworkflows list remote +

-Once done, install the project with : +To get more information on a module (say `denoising/nlmeans`) use : ```bash -poetry install +nf-core modules info denoising/nlmeans ``` -### Loading the project's environment - -> [!IMPORTANT] -> Make sure no python environment is activated before running commands ! +

+ nf-core modules info +

-The project scripts and dependencies can be accessed using : +or for a subworkflow (say `preproc_t1`) : ```bash -poetry shell +nf-core subworkflows info preproc_t1 ``` -which will activate the project's python environment in the current shell. +

+ nf-core subworkflows info +

> [!NOTE] -> You will know the poetry environment is activated by looking at your shell. The -> input line should be prefixed by : `(nf-neuro-tools-py)`, with `` -> being the actual Python version used in the environment. - -To exit the environment, simply enter the `exit` command in the shell. +> Additionally, `VS Code` users can install the [nextflow extension](https://marketplace.visualstudio.com/items?itemName=nextflow.nextflow), +> which contains a language server that helps you in real time when coding. It gives you useful tooltips on modules inputs and outputs, commands +> to navigate between modules and workflows and highlights errors. For sure, you get all that for free if you use the `devcontainer` ! > [!IMPORTANT] -> Do not use traditional deactivation (calling `deactivate`), since it does not relinquish -> the environment gracefully, making it so you won't be able to reactivate it without -> exiting the shell. +> The `nextflow language server` is a precious resource that will help you resolve most exceptions existing within the metadata +> description of modules and workflows prescribed by `nf-core` and shown below. Thus, we highly recommend its use. -### Global environment +You'll get a good description of the modules's or subworkflow's `behavior` and `dependencies`, as well as a +thorough description of its `inputs` and `outputs`. -Set the following environment variables in your `.bashrc` (or whatever is the equivalent for your shell) : +# Pipeline creation with `nf-neuro` -```bash -export NFCORE_MODULES_GIT_REMOTE=https://github.com/scilus/nf-neuro.git -export NFCORE_MODULES_GIT_BRANCH=main -export NFCORE_SUBWORKFLOWS_GIT_REMOTE=https://github.com/scilus/nf-neuro.git -export NFCORE_SUBWORKFLOWS_GIT_BRANCH=main -``` - -This will make it so the `nf-core` commands target the right repository by default. Else, you'll need to add `--git-remote` and `--branch` options to pretty much all commands relating to `modules` and `subworkflows`. 
- -### Working with VS Code - -The `nf-neuro` project curates a bundle of useful extensions for Visual Studio Code, the `nf-neuro-extensions` package. You can find it easily on the [extension -marketplace](https://marketplace.visualstudio.com/items?itemName=nf-neuro.nf-neuro-extensionpack). - -## Configuration via the `devcontainer` - -The `devcontainer` definition for the project contains all required dependencies and setup -steps are automatically executed. To use this installation method, you need to have **Docker** (refer to [this section](#configuring-docker-for-easy-usage) for configuration requirements or validate your configuration) and **Visual Studio Code** installed on your system. - -Open the cloned repository in _VS Code_ and click on the arrow box in the lower left corner, to get a prompt to `Reopen in container`. The procedure -will start a docker build, wait for a few minutes and enjoy your fully configured development -environment. +## [Prototyping using components from `nf-neuro`](./docs/PROTOTYPING.md) -- Available in the container : - - - `nf-neuro`, `nf-core` all accessible through the terminal, which is configured to load - the `poetry` environment in shells automatically - - `nf-neuro` configured as the main repository for all `nf-core` commands, using `NFCORE_*` environment variables - - `git`, `github-cli` - - `curl`, `wget`, `apt-get` - - `nextflow`, `nf-test`, `docker`, `tmux` - -- Available in the VS Code IDE through extensions : - - Docker images and containers management - - Nextflow execution environemnt - - Python and C++ linting, building and debugging tools - - Github Pull Requests management - - Github flavored markdown previewing - - Test Data introspection - - Resource monitoring - - Remote development - - Live sharing with collaborators +## [Porting prototypes to `nf-` ready pipelines](./docs/PRODUCTION.md) # Contributing to the `nf-neuro` project -If you want to propose a new `module` to the repository, follow the guidelines 
in the [module creation](./docs/MODULE.md) documentation. The same goes for `subworkflows`, using [these guidelines](./docs/SUBWORKFLOWS.md) instead. We follow standards closely aligned with `nf-core`, with some exceptions on process atomicity and how test data is handled. Modules that don't abide to them won't be accepted and PR containing them will be closed automatically. +> [!IMPORTANT] +> First, follow the [devops guide](./docs/environment/DEVOPS.md) to setup your +> `development environment` or check if your current one meets the requirements. + +`nf-neuro` accepts contribution of new **modules** and **subworkflows** to its library. You'll need first to +[setup your environment](./docs/DEVOPS.md), for which we have devised clever ways to streamline the procedure. +Then, depending on which kind of component you want to submit, you'll either need to follow the [module creation](./docs/MODULE.md) +or the [subworkflow creation](./docs/SUBWORKFLOWS.md) guidelines. Components that don't abide to them won't be accepted +and PR containing them will be closed automatically. # Running tests @@ -246,39 +140,3 @@ nf-core modules test ``` The tool can be omitted to run tests for all modules in a category. - -# Configuring Docker for easy usage - -The simplest way of installing everything Docker is to use [Docker Desktop](https://www.docker.com/products/docker-desktop). You can also go the [engine way](https://docs.docker.com/engine/install) and install Docker manually. - -Once installed, you need to configure some access rights to the Docker daemon. The easiest way to do this is to add your user to the `docker` group. This can be done with the following command : - -```bash -sudo groupadd docker -sudo usermod -aG docker $USER -``` - -After running this command, you need to log out and log back in to apply the changes. 
- -# Installing Prettier and editorconfig - -To install **Prettier** and **editorconfig** for the project, you need to have `node` and `npm` installed on your system to at least version 14. On Ubuntu, you can do it using snap : - -```bash -sudo snap install node --classic -``` - -However, if you cannot install snap, or have another OS, refer to the [official documentation](https://nodejs.org/en/download/package-manager/) for the installation procedure. - -Under the current configuration for the _Development Container_, for this project, we use the following procedure, considering `${NODE_MAJOR}` is at least 14 for Prettier : - -```bash -curl -fsSL https://deb.nodesource.com/setup_${NODE_MAJOR}.x | bash - &&\ -apt-get install -y nodejs - -npm install --save-dev --save-exact prettier -npm install --save-dev --save-exact editorconfig-checker - -echo "function prettier() { npm exec prettier $@; }" >> ~/.bashrc -echo "function editorconfig-checker() { npm exec editorconfig-checker $@; }" >> ~/.bashrc -``` diff --git a/docs/PRODUCTION.md b/docs/PRODUCTION.md new file mode 100644 index 00000000..74e88829 --- /dev/null +++ b/docs/PRODUCTION.md @@ -0,0 +1,3 @@ +# Porting prototypes to `nf-` ready pipelines + +**SECTION TO COME** diff --git a/docs/PROTOTYPING.md b/docs/PROTOTYPING.md new file mode 100644 index 00000000..447c212d --- /dev/null +++ b/docs/PROTOTYPING.md @@ -0,0 +1,242 @@ +# Prototyping using components from `nf-neuro` + +- [Prototyping using components from `nf-neuro`](#prototyping-using-components-from-nf-neuro) + - [Environment configuration](#environment-configuration) + - [Basic prototype pipeline creation](#basic-prototype-pipeline-creation) + - [`main.nf`](#mainnf) + - [`main.nf` example](#mainnf-example) + - [`nextflow.config`](#nextflowconfig) + +## Environment configuration + +To get setup fast, we recommend using **VS Code** and the `development container`. Follow the +[guide here](./environment/DEVCONTAINER.md#prototyping-environment) to do so. 
You can also use +[those instructions](./environment/NFCORE.md#manual-installation) to set up your environment manually. + +## Basic prototype pipeline creation + +To create a prototype pipeline (for personal use or testing), you first need to create a few files, which +you can keep empty for now : + +``` +nextflow.config +main.nf +.nf-core.yml +``` + +### `nextflow.config` + +The `nextflow.config` file contains **parameters** that users can change when calling your pipeline +(prefixed with `params.`) and default configurations for execution. Here is an example of a basic +`nextflow.config` file : + +```nextflow +params.input = false +params.output = 'output' + +docker.enabled = true +docker.runOptions = '-u $(id -u):$(id -g)' +``` + +The parameters defined with `params.` can be changed at execution by another `nextflow.config` file or +by supplying them as arguments when calling the pipeline using `nextflow run` : + +```bash +nextflow run main.nf --input /path/to/input --output /path/to/output +``` + +### `main.nf` + +This file is your pipeline execution file. It contains all modules and subworkflows you want to run, and the +channels that define how data passes between them. This is also where you define how to fetch your input files. 
+This can be done using a workflow definition, here is an example for a basic usage: + +```nextflow +#!/usr/bin/env nextflow + +workflow get_data { + main: + if ( !params.input ) { + log.info "You must provide an input directory containing all images using:" + log.info "" + log.info " --input=/path/to/[input] Input directory containing your subjects" + log.info "" + log.info " [input]" + log.info " ├-- S1" + log.info " | ├-- *dwi.nii.gz" + log.info " | ├-- *dwi.bval" + log.info " | ├-- *dwi.bvec" + log.info " | ├-- *revb0.nii.gz" + log.info " | └-- *t1.nii.gz" + log.info " └-- S2" + log.info " ├-- *dwi.nii.gz" + log.info " ├-- *bval" + log.info " ├-- *bvec" + log.info " ├-- *revb0.nii.gz" + log.info " └-- *t1.nii.gz" + log.info "" + error "Please resubmit your command with the previous file structure." + } + input = file(params.input) + // ** Loading all files. ** // + dwi_channel = Channel.fromFilePairs("$input/**/*dwi.{nii.gz,bval,bvec}", size: 3, flat: true) + { it.parent.name } + .map{ sid, bvals, bvecs, dwi -> [ [id: sid], dwi, bvals, bvecs ] } // Reordering the inputs. + rev_channel = Channel.fromFilePairs("$input/**/*revb0.nii.gz", size: 1, flat: true) + { it.parent.name } + .map{ sid, rev -> [ [id: sid], rev ] } + t1_channel = Channel.fromFilePairs("$input/**/*t1.nii.gz", size: 1, flat: true) + { it.parent.name } + .map{ sid, t1 -> [ [id: sid], t1 ] } + emit: + dwi = dwi_channel + rev = rev_channel + t1 = t1_channel +} + +workflow { + // ** Now call your input workflow to fetch your files ** // + data = get_data() + data.dwi.view() // Contains your DWI data: [meta, dwi, bval, bvec] + data.rev.view() // Contains your reverse B0 data: [meta, rev] + data.t1.view() // Contains your anatomical data (T1 in this case): [meta, t1] +} +``` + +Now, you can install the modules you want to include in your pipeline. Let's import the `denoising/nlmeans` module +for T1 denoising. To do so, simply install it using the `nf-core modules install` command. 
 + +```bash +nf-core modules install denoising/nlmeans +``` + +To use it in your pipeline, you need to import it at the top of your `main.nf` file. You can do it using the +`include { DENOISING_NLMEANS } from './modules/nf-neuro/denoising/nlmeans/main.nf'` statement. Once done, you can +use `DENOISING_NLMEANS` in your pipeline and feed your inputs to it! To have a look at which files are required to +run the module, use the `nf-core modules info denoising/nlmeans` command (if you are using **VS Code**, install the +`nextflow` extension, that gives you hints on modules and subworkflows inputs). A complete example (e.g., fetching the +inputs, importing the module, and supplying the inputs to the modules) can be seen below: + +#### `main.nf` example + +```nextflow +#!/usr/bin/env nextflow + +include { DENOISING_NLMEANS } from './modules/nf-neuro/denoising/nlmeans/main.nf' + +workflow get_data { + main: + if ( !params.input ) { + log.info "You must provide an input directory containing all images using:" + log.info "" + log.info " --input=/path/to/[input] Input directory containing your subjects" + log.info "" + log.info " [input]" + log.info " ├-- S1" + log.info " | ├-- *dwi.nii.gz" + log.info " | ├-- *dwi.bval" + log.info " | ├-- *dwi.bvec" + log.info " | ├-- *revb0.nii.gz" + log.info " | └-- *t1.nii.gz" + log.info " └-- S2" + log.info " ├-- *dwi.nii.gz" + log.info " ├-- *bval" + log.info " ├-- *bvec" + log.info " ├-- *revb0.nii.gz" + log.info " └-- *t1.nii.gz" + log.info "" + error "Please resubmit your command with the previous file structure." + } + + input = file(params.input) + // ** Loading all files. ** // + dwi_channel = Channel.fromFilePairs("$input/**/*dwi.{nii.gz,bval,bvec}", size: 3, flat: true) + { it.parent.name } + .map{ sid, bvals, bvecs, dwi -> [ [id: sid], dwi, bvals, bvecs ] } // Reordering the inputs. 
+ rev_channel = Channel.fromFilePairs("$input/**/*revb0.nii.gz", size: 1, flat: true) + { it.parent.name } + .map{ sid, rev -> [ [id: sid], rev ] } + anat_channel = Channel.fromFilePairs("$input/**/*t1.nii.gz", size: 1, flat: true) + { it.parent.name } + .map{ sid, t1 -> [ [id: sid], t1 ] } + emit: // Those three lines below define your named output, use those labels to select which file you want. + dwi = dwi_channel + rev = rev_channel + anat = anat_channel +} + +workflow { + inputs = get_data() + // ** Create the input channel for nlmeans. ** // + // ** - Note that it also can take a mask as input, but it is not required. ** // + // ** - Replacing it by an empty list here. ** // + ch_denoising = inputs.anat + .map{ it + [[]] } // This adds one empty list to the channel, since we do not have a mask. + + // ** Run DENOISING_NLMEANS ** // + DENOISING_NLMEANS( ch_denoising ) + DENOISING_NLMEANS.out.image.view() // This will show the output of the module. + + // ** You can then reuse the outputs and supply them to another module/subworkflow! ** // + //ch_nextmodule = DENOISING_NLMEANS.out.image + // .join(ch_another_file) + // NEXT_MODULE( ch_nextmodule ) +} +``` + +### Fetching the outputs from the modules + +You now have a working `main.nf` file. You could execute the pipeline, but the outputs would be hard to access. Let's define the +`publishDir` into which to place them using the `nextflow.config` file and the `output` parameter we defined earlier : + +```nextflow +process { + publishDir = { "${params.output}/$meta.id/${task.process.replaceAll(':', '-')}" } +} +``` + +> [!IMPORTANT] +> Here, `meta` is a special variable, defined in every **module**, a map that gets passed around with the data, into which you can +> put information. Beware however, as it is also used to **join channels together** by looking at their whole content. 
+ +### Defining modules parameters + +Once this is done, you might want to supply parameters for some of your modules that could be modified when calling the pipeline. +To know which parameters are accepted in your modules, refer to the `main.nf` of the specific `nf-neuro` module and look for parameters +that are prefixed with `ext.`, placed just before its **bash script**. `denoising/nlmeans` takes 1 possible parameter, `number_of_coils`, +that we add to the `nextflow.config` file : + +```nextflow +params.number_of_coils = 1 +``` + +The last step is to bind your parameters to the specific module they are meant for. This is done using a **process selector** (`withName`), that +links the `ext.` parameter to the `params.` parameter : + +```nextflow +withName: 'DENOISING_NLMEANS' { + ext.number_of_coils = params.number_of_coils +} +``` + +That's it! Your `nextflow.config` should look something like this: + +``` +params.input = false +params.output = 'output' + +docker.enabled = true +docker.runOptions = '-u $(id -u):$(id -g)' + +process { + publishDir = { "${params.output}/$meta.id/${task.process.replaceAll(':', '-')}" } +} + +params.number_of_coils = 1 + +withName: 'DENOISING_NLMEANS' { + ext.number_of_coils = params.number_of_coils +} +``` + +Once your pipeline is built, or when you want to test it, run `nextflow run main.nf --input `. diff --git a/docs/environment/DEVCONTAINER.md b/docs/environment/DEVCONTAINER.md new file mode 100644 index 00000000..514a2ffa --- /dev/null +++ b/docs/environment/DEVCONTAINER.md @@ -0,0 +1,104 @@ +# Using `nf-neuro` development containers + +`nf-neuro` comes preloaded with a set of development containers destined at streamlining +your development. They provide `pre-installed` environments for you to start programming +new `pipelines` or `nf-neuro components`. 
+ +- [Requirements](#requirements) + - [Configuring Docker for easy usage](#configuring-docker-for-easy-usage) +- [Prototyping environment](#prototyping-environment) + - [Usage](#usage) + - [Available in the container](#available-in-the-container) + - [Available in the VS Code IDE through extensions](#available-in-the-vs-code-ide-through-extensions) +- [Production environment](#production-environment) +- [Development environment](#development-environment) + - [Usage](#usage-1) + - [Available in the container](#available-in-the-container-1) + - [Available in the VS Code IDE through extensions](#available-in-the-vs-code-ide-through-extensions-1) + +## Requirements + +- [VS Code](https://code.visualstudio.com) ≥ 1.95 +- [Docker](https://www.docker.com/get-started/) ≥ 24 (we recommend using [Docker Desktop](https://www.docker.com/products/docker-desktop)) + +### Configuring Docker for easy usage + +The simplest way of installing everything Docker is to use [Docker Desktop](https://www.docker.com/products/docker-desktop). You can also go the [engine way](https://docs.docker.com/engine/install) and install Docker manually. + +Once installed, you need to configure some access rights to the Docker daemon. The easiest way to do this is to add your user to the `docker` group. This can be done with the following command : + +```bash +sudo groupadd docker +sudo usermod -aG docker $USER +``` + +After running this command, you need to log out and log back in to apply the changes. + +## Prototyping environment + +> [!WARNING] +> The `prototyping` environment definition is not meant to be run from the `nf-neuro` repository root. +> Locate your project elsewhere (we recommend putting it outside this directory to prevent +> conflicts with `git`). + +### Usage + +Copy the **devcontainer** definition located in `.devcontainer/prototyping` inside a `.devcontainer` +directory inside your project. 
Then, open your project with **VS CODE** and click on the **blue box** in the +lower left corner, to get a prompt to `Reopen in container`. The procedure will start a docker build, wait +for a few minutes and enjoy your fully configured development environment. + +#### Available in the container + +- `nf-core` accessible through the terminal, which is configured to access `nf-neuro` modules and subworkflows +- `git`, `github-cli` +- `curl`, `wget`, `apt-get` +- `nextflow`, `docker`, `tmux` + +#### Available in the VS Code IDE through extensions + +- Docker images and containers management +- Nextflow execution environemnt +- Python and C++ linting, building and debugging tools +- Github Pull Requests management +- Github flavored markdown previewing +- Neuroimaging data viewer +- Test Data introspection +- Resource monitoring +- Remote development +- Live sharing with collaborators + +## Production environment + +SECTION TO COME + +## Development environment + +### Usage + +To use the development environment, you need to have the repository cloned. You can do it using +_VS Code_. Once opened in _VS CODE_, click on the arrow box in the lower left corner, to get a prompt to +`Reopen in container`. Select the `devops` container. The procedure will start a docker build, wait for a +few minutes and enjoy your fully configured development environment. 
+ +#### Available in the container + +- `nf-neuro`, `nf-core` all accessible through the terminal, which is configured to load + the `poetry` environment in shells automatically +- `nf-neuro` configured as the main repository for all `nf-core` commands, using `NFCORE_-` environment variables +- `git`, `github-cli` +- `curl`, `wget`, `apt-get` +- `nextflow`, `nf-test`, `docker`, `tmux` + +#### Available in the VS Code IDE through extensions + +- Docker images and containers management +- Nextflow execution environemnt +- Python and C++ linting, building and debugging tools +- Github Pull Requests management +- Github flavored markdown previewing +- Neuroimaging data viewer +- Test Data introspection +- Resource monitoring +- Remote development +- Live sharing with collaborators diff --git a/docs/environment/DEVOPS.md b/docs/environment/DEVOPS.md new file mode 100644 index 00000000..41d422f2 --- /dev/null +++ b/docs/environment/DEVOPS.md @@ -0,0 +1,129 @@ +# Developing within `nf-neuro` + +- [Development container](#development-container) +- [Manual installation](#manual-installation) + - [Dependencies](#dependencies) + - [Python environment](#python-environment) + - [Loading the project's environment](#loading-the-projects-environment) + - [Global environment](#global-environment) + - [Working with VS Code](#working-with-vs-code) +- [Installing Prettier and editorconfig](#installing-prettier-and-editorconfig) + +## Development container + +A complete `development container` that setups you in a matter of minutes is [available here](./DEVCONTAINER.md#development-environment). 
+ +## Manual installation + +### Dependencies + +- Python ≥ 3.8, < 3.13 +- Docker ≥ 24 (we recommend using [Docker Desktop](https://www.docker.com/products/docker-desktop)) +- Java Runtime ≥ 11, ≤ 17 + - On Ubuntu, install `openjdk-jre-` packages +- Nextflow ≥ 23.04.0 +- nf-test ≥ 0.9.0 +- Node ≥ 14, `Prettier` and `editorconfig` (see [below](#installing-prettier-and-editorconfig)) + +> [!IMPORTANT] +> Nextflow might not detect the right `Java virtual machine` by default, more so if +> multiple versions of the runtime are installed. If so, you need to set the environment +> variable `JAVA_HOME` to target the right one. +> +> - Linux : look in `/usr/lib/jvm` for +> a folder named `java--openjdk-` and use it as `JAVA_HOME`. +> - MacOS : if the `Java jvm` is the preferential one, use `JAVA_HOME=$(/usr/libexec/java_home)`. +> Else, look into `/Library/Java/JavaVirtualMachines` for the folder with the correct +> runtime version (named `jdk_1.jdk`) and use the +> following : `/Library/Java/JavaVirtualMachines/jdk_1.jdk/Contents/Home`. + +### Python environment + +The project uses _poetry_ to manage python dependencies. To install it using pipx, +run the following commands : + +```bash +pip install pipx +pipx ensurepath +pipx install poetry==1.8.* +``` + +> [!NOTE] +> If the second command above fails, `pipx` cannot be found in the path. Prepend the +> second command with `$(which python) -m` and rerun the whole block. + +> [!WARNING] +> Poetry doesn't like when other python environments are activated around it. Make +> sure to deactivate any before calling `poetry` commands. + +Once done, install the project with : + +```bash +poetry install --no-root +``` + +### Loading the project's environment + +> [!IMPORTANT] +> Make sure no python environment is activated before running commands ! + +The project scripts and dependencies can be accessed using : + +```bash +poetry shell +``` + +which will activate the project's python environment in the current shell. 
+ +> [!NOTE] +> You will know the poetry environment is activated by looking at your shell. The +> input line should be prefixed by : `(nf-neuro-tools-py)`, with `` +> being the actual Python version used in the environment. + +To exit the environment, simply enter the `exit` command in the shell. + +> [!IMPORTANT] +> Do not use traditional deactivation (calling `deactivate`), since it does not relinquish +> the environment gracefully, making it so you won't be able to reactivate it without +> exiting the shell. + +### Global environment + +Set the following environment variables in your `.bashrc` (or whatever is the equivalent for your shell) : + +```bash +export NFCORE_MODULES_GIT_REMOTE=https://github.com/scilus/nf-neuro.git +export NFCORE_MODULES_GIT_BRANCH=main +export NFCORE_SUBWORKFLOWS_GIT_REMOTE=https://github.com/scilus/nf-neuro.git +export NFCORE_SUBWORKFLOWS_GIT_BRANCH=main +``` + +This will make it so the `nf-core` commands target the right repository by default. Else, you'll need to add `--git-remote` and `--branch` options to pretty much all commands relating to `modules` and `subworkflows`. + +### Working with VS Code + +The `nf-neuro` project curates a bundle of useful extensions for Visual Studio Code, the `nf-neuro-extensions` package. You can find it easily on the [extension +marketplace](https://marketplace.visualstudio.com/items?itemName=nf-neuro.nf-neuro-extensionpack). + +## Installing Prettier and editorconfig + +To install **Prettier** and **editorconfig** for the project, you need to have `node` and `npm` installed on your system to at least version 14. On Ubuntu, you can do it using snap : + +```bash +sudo snap install node --classic +``` + +However, if you cannot install snap, or have another OS, refer to the [official documentation](https://nodejs.org/en/download/package-manager/) for the installation procedure. 
+ +Under the current configuration for the _Development Container_, for this project, we use the following procedure, considering `${NODE_MAJOR}` is at least 14 for Prettier : + +```bash +curl -fsSL https://deb.nodesource.com/setup_${NODE_MAJOR}.x | bash - &&\ +apt-get install -y nodejs + +npm install --save-dev --save-exact prettier +npm install --save-dev --save-exact editorconfig-checker + +echo "function prettier() { npm exec prettier $@; }" >> ~/.bashrc +echo "function editorconfig-checker() { npm exec editorconfig-checker $@; }" >> ~/.bashrc +``` diff --git a/docs/environment/NFCORE.md b/docs/environment/NFCORE.md new file mode 100644 index 00000000..57abd9c4 --- /dev/null +++ b/docs/environment/NFCORE.md @@ -0,0 +1,49 @@ +# Basic environment setup + +- [Development container](#development-container) +- [Manual installation](#manual-installation) + - [Dependencies](#dependencies) + - [Configuration](#configuration) + +## Development container + +A lightweight `development container` that setups you in a matter of minutes is [available here](./DEVCONTAINER.md#prototyping-environment). + +## Manual installation + +### Dependencies + +- Python ≥ 3.8, < 3.13 +- Docker ≥ 24 (we recommend using [Docker Desktop](https://www.docker.com/products/docker-desktop)) +- Java Runtime ≥ 11, ≤ 17 + - On Ubuntu, install `openjdk-jre-` packages +- Nextflow ≥ 23.04.0 + +> [!IMPORTANT] +> Nextflow might not detect the right `Java virtual machine` by default, more so if +> multiple versions of the runtime are installed. If so, you need to set the environment +> variable `JAVA_HOME` to target the right one. +> +> - Linux : look in `/usr/lib/jvm` for +> a folder named `java--openjdk-` and use it as `JAVA_HOME`. +> - MacOS : if the `Java jvm` is the preferential one, use `JAVA_HOME=$(/usr/libexec/java_home)`. 
+> Else, look into `/Library/Java/JavaVirtualMachines` for the folder with the correct +> runtime version (named `jdk_1.jdk`) and use the +> following : `/Library/Java/JavaVirtualMachines/jdk_1.jdk/Contents/Home`. + +### Configuration + +Install `nf-core` in your `python` environment (we recommend using a `virtual environment`) : + +```bash +pip install nf-core==2.14.1 +``` + +Configure your current environment so `nf-core` commands have access to `nf-neuro` modules : + +```bash +export NFCORE_MODULES_GIT_REMOTE=https://github.com/scilus/nf-neuro.git +export NFCORE_MODULES_GIT_BRANCH=main +export NFCORE_SUBWORKFLOWS_GIT_REMOTE=https://github.com/scilus/nf-neuro.git +export NFCORE_SUBWORKFLOWS_GIT_BRANCH=main +``` diff --git a/docs/images/cli/nfcore_modules_info.png b/docs/images/cli/nfcore_modules_info.png new file mode 100644 index 00000000..738a9ce1 Binary files /dev/null and b/docs/images/cli/nfcore_modules_info.png differ diff --git a/docs/images/cli/nfcore_modules_list.png b/docs/images/cli/nfcore_modules_list.png new file mode 100644 index 00000000..cbe2a82d Binary files /dev/null and b/docs/images/cli/nfcore_modules_list.png differ diff --git a/docs/images/cli/nfcore_subworkflows_info.png b/docs/images/cli/nfcore_subworkflows_info.png new file mode 100644 index 00000000..b9062c79 Binary files /dev/null and b/docs/images/cli/nfcore_subworkflows_info.png differ diff --git a/docs/images/cli/nfcore_subworkflows_list.png b/docs/images/cli/nfcore_subworkflows_list.png new file mode 100644 index 00000000..7d993f1b Binary files /dev/null and b/docs/images/cli/nfcore_subworkflows_list.png differ