From fa869e98f7c6fdc38fabc96ac52bac30904c0b3a Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Tue, 8 Nov 2022 09:51:22 +0000 Subject: [PATCH 1/8] Bump pipeline version to 1.9dev --- CHANGELOG.md | 4 ++++ nextflow.config | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b57bfefc..5fd50bd1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [Unpublished Version / DEV] + +### Enhancements & fixes + ## [[1.8](https://github.com/nf-core/fetchngs/releases/tag/1.8)] - 2022-11-08 ### Enhancements & fixes diff --git a/nextflow.config b/nextflow.config index 633a0fb2..94cc7de4 100644 --- a/nextflow.config +++ b/nextflow.config @@ -174,7 +174,7 @@ manifest { description = 'Pipeline to fetch metadata and raw FastQ files from public databases' mainScript = 'main.nf' nextflowVersion = '!>=21.10.3' - version = '1.8' + version = '1.9dev' doi = 'https://doi.org/10.5281/zenodo.5070524' } From 73576379b479cd12f6780a952322d63383a63e8a Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Thu, 8 Dec 2022 13:09:36 +0000 Subject: [PATCH 2/8] Template update for nf-core/tools version 2.7.1 --- .devcontainer/devcontainer.json | 27 +++++++++++++ .gitattributes | 1 + .github/CONTRIBUTING.md | 16 ++++++++ .github/ISSUE_TEMPLATE/bug_report.yml | 2 +- .github/workflows/ci.yml | 8 +++- .github/workflows/fix-linting.yml | 6 +-- .github/workflows/linting.yml | 18 +++++---- .github/workflows/linting_comment.yml | 2 +- .prettierignore | 2 + CHANGELOG.md | 2 +- CITATION.cff | 56 --------------------------- README.md | 4 +- assets/slackreport.json | 34 ++++++++++++++++ docs/usage.md | 24 +++++++----- lib/NfcoreSchema.groovy | 1 - lib/NfcoreTemplate.groovy | 41 +++++++++++++++----- lib/WorkflowMain.groovy | 18 ++++++--- modules.json | 9 +++-- modules/local/samplesheet_check.nf | 4 ++ nextflow.config | 14 +++++-- nextflow_schema.json | 8 +++- workflows/fetchngs.nf | 11 +++--- 22 files changed, 195 insertions(+), 113 deletions(-) create mode 100644 .devcontainer/devcontainer.json delete mode 100644 CITATION.cff create mode 100644 assets/slackreport.json diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 00000000..ea27a584 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,27 @@ +{ + "name": "nfcore", + "image": "nfcore/gitpod:latest", + "remoteUser": "gitpod", + + // Configure tool-specific properties. + "customizations": { + // Configure properties specific to VS Code. + "vscode": { + // Set *default* container specific settings.json values on container create. + "settings": { + "python.defaultInterpreterPath": "/opt/conda/bin/python", + "python.linting.enabled": true, + "python.linting.pylintEnabled": true, + "python.formatting.autopep8Path": "/opt/conda/bin/autopep8", + "python.formatting.yapfPath": "/opt/conda/bin/yapf", + "python.linting.flake8Path": "/opt/conda/bin/flake8", + "python.linting.pycodestylePath": "/opt/conda/bin/pycodestyle", + "python.linting.pydocstylePath": "/opt/conda/bin/pydocstyle", + "python.linting.pylintPath": "/opt/conda/bin/pylint" + }, + + // Add the IDs of extensions you want installed when the container is created. 
+ "extensions": ["ms-python.python", "ms-python.vscode-pylance", "nf-core.nf-core-extensionpack"] + } + } +} diff --git a/.gitattributes b/.gitattributes index 050bb120..7a2dabc2 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,3 +1,4 @@ *.config linguist-language=nextflow +*.nf.test linguist-language=nextflow modules/nf-core/** linguist-generated subworkflows/nf-core/** linguist-generated diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 1f2a7938..69af505f 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -101,3 +101,19 @@ If you are using a new feature from core Nextflow, you may bump the minimum requ ### Images and figures For overview images and other documents we follow the nf-core [style guidelines and examples](https://nf-co.re/developers/design_guidelines). + +## GitHub Codespaces + +This repo includes a devcontainer configuration which will create a GitHub Codespaces for Nextflow development! This is an online developer environment that runs in your browser, complete with VSCode and a terminal. + +To get started: + +- Open the repo in [Codespaces](https://github.com/nf-core/fetchngs/codespaces) +- Tools installed + - nf-core + - Nextflow + +Devcontainer specs: + +- [DevContainer config](.devcontainer/devcontainer.json) +- [Dockerfile](.devcontainer/Dockerfile) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 8c2c9513..fea6264d 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -42,7 +42,7 @@ body: attributes: label: System information description: | - * Nextflow version _(eg. 21.10.3)_ + * Nextflow version _(eg. 22.10.1)_ * Hardware _(eg. HPC, Desktop, Cloud)_ * Executor _(eg. slurm, local, awsbatch)_ * Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter or Charliecloud)_ diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1da98e12..f0e03c7b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -11,6 +11,10 @@ on: env: NXF_ANSI_LOG: false +concurrency: + group: "${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}" + cancel-in-progress: true + jobs: test: name: Run pipeline with test data @@ -20,11 +24,11 @@ jobs: strategy: matrix: NXF_VER: - - "21.10.3" + - "22.10.1" - "latest-everything" steps: - name: Check out pipeline code - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Install Nextflow uses: nf-core/setup-nextflow@v1 diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml index 27e4431c..55000527 100644 --- a/.github/workflows/fix-linting.yml +++ b/.github/workflows/fix-linting.yml @@ -24,7 +24,7 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install Prettier run: npm install -g prettier @prettier/plugin-php @@ -34,9 +34,9 @@ jobs: id: prettier_status run: | if prettier --check ${GITHUB_WORKSPACE}; then - echo "::set-output name=result::pass" + echo "name=result::pass" >> $GITHUB_OUTPUT else - echo "::set-output name=result::fail" + echo "name=result::fail" >> $GITHUB_OUTPUT fi - name: Run 'prettier --write' diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index 8a5ce69b..858d622e 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -4,6 +4,8 @@ name: nf-core linting # that the code meets the nf-core guidelines. 
on: push: + branches: + - dev pull_request: release: types: [published] @@ -12,9 +14,9 @@ jobs: EditorConfig: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install editorconfig-checker run: npm install -g editorconfig-checker @@ -25,9 +27,9 @@ jobs: Prettier: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 - name: Install Prettier run: npm install -g prettier @@ -38,7 +40,7 @@ jobs: PythonBlack: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Check code lints with Black uses: psf/black@stable @@ -69,12 +71,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out pipeline code - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Install Nextflow uses: nf-core/setup-nextflow@v1 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: "3.7" architecture: "x64" @@ -97,7 +99,7 @@ jobs: - name: Upload linting log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: linting-logs path: | diff --git a/.github/workflows/linting_comment.yml b/.github/workflows/linting_comment.yml index 04758f61..39635186 100644 --- a/.github/workflows/linting_comment.yml +++ b/.github/workflows/linting_comment.yml @@ -18,7 +18,7 @@ jobs: - name: Get PR number id: pr_number - run: echo "::set-output name=pr_number::$(cat linting-logs/PR_number.txt)" + run: echo "name=pr_number::$(cat linting-logs/PR_number.txt)" >> $GITHUB_OUTPUT - name: Post PR comment uses: marocchino/sticky-pull-request-comment@v2 diff --git a/.prettierignore b/.prettierignore index eb74a574..437d763d 100644 --- a/.prettierignore +++ b/.prettierignore @@ -1,5 +1,6 @@ email_template.html adaptivecard.json +slackreport.json .nextflow* work/ data/ @@ -8,3 +9,4 @@ results/ testing/ testing* *.pyc +bin/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 83d55de4..deef01a9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## v1.8dev - [date] +## v1.9dev - [date] Initial release of nf-core/fetchngs, created with the [nf-core](https://nf-co.re/) template. diff --git a/CITATION.cff b/CITATION.cff deleted file mode 100644 index 017666c0..00000000 --- a/CITATION.cff +++ /dev/null @@ -1,56 +0,0 @@ -cff-version: 1.2.0 -message: "If you use `nf-core tools` in your work, please cite the `nf-core` publication" -authors: - - family-names: Ewels - given-names: Philip - - family-names: Peltzer - given-names: Alexander - - family-names: Fillinger - given-names: Sven - - family-names: Patel - given-names: Harshil - - family-names: Alneberg - given-names: Johannes - - family-names: Wilm - given-names: Andreas - - family-names: Garcia - given-names: Maxime Ulysse - - family-names: Di Tommaso - given-names: Paolo - - family-names: Nahnsen - given-names: Sven -title: "The nf-core framework for community-curated bioinformatics pipelines." 
-version: 2.4.1 -doi: 10.1038/s41587-020-0439-x -date-released: 2022-05-16 -url: https://github.com/nf-core/tools -prefered-citation: - type: article - authors: - - family-names: Ewels - given-names: Philip - - family-names: Peltzer - given-names: Alexander - - family-names: Fillinger - given-names: Sven - - family-names: Patel - given-names: Harshil - - family-names: Alneberg - given-names: Johannes - - family-names: Wilm - given-names: Andreas - - family-names: Garcia - given-names: Maxime Ulysse - - family-names: Di Tommaso - given-names: Paolo - - family-names: Nahnsen - given-names: Sven - doi: 10.1038/s41587-020-0439-x - journal: nature biotechnology - start: 276 - end: 278 - title: "The nf-core framework for community-curated bioinformatics pipelines." - issue: 3 - volume: 38 - year: 2020 - url: https://dx.doi.org/10.1038/s41587-020-0439-x diff --git a/README.md b/README.md index 20636e60..92478b96 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/fetchngs/results)[![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX) -[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.10.3-23aa62.svg)](https://www.nextflow.io/) +[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A522.10.1-23aa62.svg)](https://www.nextflow.io/) [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) @@ -31,7 +31,7 @@ On release, automated continuous integration tests run the pipeline on a full-si ## Quick Start -1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=21.10.3`) +1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=22.10.1`) 2. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/) (you can follow [this tutorial](https://singularity-tutorial.github.io/01-installation/)), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(you can use [`Conda`](https://conda.io/miniconda.html) both to install Nextflow itself and also to manage software within pipelines. Please only use it within pipelines as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_. 
diff --git a/assets/slackreport.json b/assets/slackreport.json new file mode 100644 index 00000000..043d02f2 --- /dev/null +++ b/assets/slackreport.json @@ -0,0 +1,34 @@ +{ + "attachments": [ + { + "fallback": "Plain-text summary of the attachment.", + "color": "<% if (success) { %>good<% } else { %>danger<%} %>", + "author_name": "sanger-tol/readmapping v${version} - ${runName}", + "author_icon": "https://www.nextflow.io/docs/latest/_static/favicon.ico", + "text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors<% } %>", + "fields": [ + { + "title": "Command used to launch the workflow", + "value": "```${commandLine}```", + "short": false + } + <% + if (!success) { %> + , + { + "title": "Full error message", + "value": "```${errorReport}```", + "short": false + }, + { + "title": "Pipeline configuration", + "value": "<% out << summary.collect{ k,v -> k == "hook_url" ? "_${k}_: (_hidden_)" : ( ( v.class.toString().contains('Path') || ( v.class.toString().contains('String') && v.contains('/') ) ) ? "_${k}_: `${v}`" : (v.class.toString().contains('DateTime') ? ("_${k}_: " + v.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM))) : "_${k}_: ${v}") ) }.join(",\n") %>", + "short": false + } + <% } + %> + ], + "footer": "Completed at <% out << dateComplete.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM)) %> (duration: ${duration})" + } + ] +} diff --git a/docs/usage.md b/docs/usage.md index 231046a5..b3c188a3 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -83,9 +83,9 @@ nextflow pull nf-core/fetchngs It is a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. -First, go to the [nf-core/fetchngs releases page](https://github.com/nf-core/fetchngs/releases) and find the latest version number - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. +First, go to the [nf-core/fetchngs releases page](https://github.com/nf-core/fetchngs/releases) and find the latest pipeline version - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. Of course, you can switch to another version by changing the number after the `-r` flag. -This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. +This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. For example, at the bottom of the MultiQC reports. ## Core Nextflow arguments @@ -95,7 +95,7 @@ This version number will be logged in reports when you run the pipeline, so that Use this parameter to choose a configuration profile. Profiles can give configuration presets for different compute environments. -Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Conda) - see below. 
When using Biocontainers, most of these software packaging methods pull Docker containers from quay.io e.g [FastQC](https://quay.io/repository/biocontainers/fastqc) except for Singularity which directly downloads Singularity images via https hosted by the [Galaxy project](https://depot.galaxyproject.org/singularity/) and Conda which downloads and installs software locally from [Bioconda](https://bioconda.github.io/). +Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Conda) - see below. > We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported. @@ -104,8 +104,11 @@ The pipeline also dynamically loads configurations from [https://github.com/nf-c Note that multiple profiles can be loaded, for example: `-profile test,docker` - the order of arguments is important! They are loaded in sequence, so later profiles can overwrite earlier profiles. -If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended. +If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended, since it can lead to different results on different machines dependent on the computer enviroment. +- `test` + - A profile with a complete configuration for automated testing + - Includes links to test data so needs no other parameters - `docker` - A generic configuration profile to be used with [Docker](https://docker.com/) - `singularity` @@ -118,9 +121,6 @@ If `-profile` is not specified, the pipeline will run locally and expect all sof - A generic configuration profile to be used with [Charliecloud](https://hpc.github.io/charliecloud/) - `conda` - A generic configuration profile to be used with [Conda](https://conda.io/docs/). Please only use Conda as a last resort i.e. when it's not possible to run the pipeline with Docker, Singularity, Podman, Shifter or Charliecloud. -- `test` - - A profile with a complete configuration for automated testing - - Includes links to test data so needs no other parameters ### `-resume` @@ -169,8 +169,14 @@ Work dir: Tip: you can replicate the issue by changing to the process work dir and entering the command `bash .command.run` ``` +#### For beginners + +A first step to bypass this error, you could try to increase the amount of CPUs, memory, and time for the whole pipeline. Therefor you can try to increase the resource for the parameters `--max_cpus`, `--max_memory`, and `--max_time`. Based on the error above, you have to increase the amount of memory. Therefore you can go to the [parameter documentation of rnaseq](https://nf-co.re/rnaseq/3.9/parameters) and scroll down to the `show hidden parameter` button to get the default value for `--max_memory`. In this case 128GB, you than can try to run your pipeline again with `--max_memory 200GB -resume` to skip all process, that were already calculated. If you can not increase the resource of the complete pipeline, you can try to adapt the resource for a single process as mentioned below. + +#### Advanced option on process level + To bypass this error you would need to find exactly which resources are set by the `STAR_ALIGN` process. 
The quickest way is to search for `process STAR_ALIGN` in the [nf-core/rnaseq Github repo](https://github.com/nf-core/rnaseq/search?q=process+STAR_ALIGN). -We have standardised the structure of Nextflow DSL2 pipelines such that all module files will be present in the `modules/` directory and so, based on the search results, the file we want is `modules/nf-core/software/star/align/main.nf`. +We have standardised the structure of Nextflow DSL2 pipelines such that all module files will be present in the `modules/` directory and so, based on the search results, the file we want is `modules/nf-core/star/align/main.nf`. If you click on the link to that file you will notice that there is a `label` directive at the top of the module that is set to [`label process_high`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/modules/nf-core/software/star/align/main.nf#L9). The [Nextflow `label`](https://www.nextflow.io/docs/latest/process.html#label) directive allows us to organise workflow processes in separate groups which can be referenced in a configuration file to select and configure subset of processes having similar computing requirements. The default values for the `process_high` label are set in the pipeline's [`base.config`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L33-L37) which in this case is defined as 72GB. @@ -189,7 +195,7 @@ process { > > If you get a warning suggesting that the process selector isn't recognised check that the process name has been specified correctly. -### Updating containers +### Updating containers (advanced users) The [Nextflow DSL2](https://www.nextflow.io/docs/latest/dsl2.html) implementation of this pipeline uses one container per process which makes it much easier to maintain and update software dependencies. If for some reason you need to use a different version of a particular tool with the pipeline then you just need to identify the `process` name and override the Nextflow `container` definition for that process using the `withName` declaration. For example, in the [nf-core/viralrecon](https://nf-co.re/viralrecon) pipeline a tool called [Pangolin](https://github.com/cov-lineages/pangolin) has been used during the COVID-19 pandemic to assign lineages to SARS-CoV-2 genome sequenced samples. Given that the lineage assignments change quite frequently it doesn't make sense to re-release the nf-core/viralrecon everytime a new version of Pangolin has been released. However, you can override the default container used by the pipeline by creating a custom config file and passing it as a command-line argument via `-c custom.config`. diff --git a/lib/NfcoreSchema.groovy b/lib/NfcoreSchema.groovy index b3d092f8..33cd4f6e 100755 --- a/lib/NfcoreSchema.groovy +++ b/lib/NfcoreSchema.groovy @@ -46,7 +46,6 @@ class NfcoreSchema { 'quiet', 'syslog', 'v', - 'version', // Options for `nextflow run` command 'ansi', diff --git a/lib/NfcoreTemplate.groovy b/lib/NfcoreTemplate.groovy index 27feb009..25a0a74a 100755 --- a/lib/NfcoreTemplate.groovy +++ b/lib/NfcoreTemplate.groovy @@ -32,6 +32,25 @@ class NfcoreTemplate { } } + // + // Generate version string + // + public static String version(workflow) { + String version_string = "" + + if (workflow.manifest.version) { + def prefix_v = workflow.manifest.version[0] != 'v' ? 
'v' : '' + version_string += "${prefix_v}${workflow.manifest.version}" + } + + if (workflow.commitId) { + def git_shortsha = workflow.commitId.substring(0, 7) + version_string += "-g${git_shortsha}" + } + + return version_string + } + // // Construct and send completion email // @@ -61,7 +80,7 @@ class NfcoreTemplate { misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp def email_fields = [:] - email_fields['version'] = workflow.manifest.version + email_fields['version'] = NfcoreTemplate.version(workflow) email_fields['runName'] = workflow.runName email_fields['success'] = workflow.success email_fields['dateComplete'] = workflow.complete @@ -146,10 +165,10 @@ class NfcoreTemplate { } // - // Construct and send adaptive card - // https://adaptivecards.io + // Construct and send a notification to a web server as JSON + // e.g. Microsoft Teams and Slack // - public static void adaptivecard(workflow, params, summary_params, projectDir, log) { + public static void IM_notification(workflow, params, summary_params, projectDir, log) { def hook_url = params.hook_url def summary = [:] @@ -170,7 +189,7 @@ class NfcoreTemplate { misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp def msg_fields = [:] - msg_fields['version'] = workflow.manifest.version + msg_fields['version'] = NfcoreTemplate.version(workflow) msg_fields['runName'] = workflow.runName msg_fields['success'] = workflow.success msg_fields['dateComplete'] = workflow.complete @@ -178,13 +197,16 @@ class NfcoreTemplate { msg_fields['exitStatus'] = workflow.exitStatus msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None') msg_fields['errorReport'] = (workflow.errorReport ?: 'None') - msg_fields['commandLine'] = workflow.commandLine + msg_fields['commandLine'] = workflow.commandLine.replaceFirst(/ +--hook_url +[^ ]+/, "") msg_fields['projectDir'] = workflow.projectDir msg_fields['summary'] = summary << misc_fields // Render the JSON template def engine = new groovy.text.GStringTemplateEngine() - def hf = new File("$projectDir/assets/adaptivecard.json") + // Different JSON depending on the service provider + // Defaults to "Adaptive Cards" (https://adaptivecards.io), except Slack which has its own format + def json_path = hook_url.contains("hooks.slack.com") ? 
"slackreport.json" : "adaptivecard.json" + def hf = new File("$projectDir/assets/${json_path}") def json_template = engine.createTemplate(hf).make(msg_fields) def json_message = json_template.toString() @@ -209,7 +231,7 @@ class NfcoreTemplate { if (workflow.stats.ignoredCount == 0) { log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-" } else { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" + log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" } } else { log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-" @@ -297,6 +319,7 @@ class NfcoreTemplate { // public static String logo(workflow, monochrome_logs) { Map colors = logColours(monochrome_logs) + String workflow_version = NfcoreTemplate.version(workflow) String.format( """\n ${dashedLine(monochrome_logs)} @@ -305,7 +328,7 @@ class NfcoreTemplate { ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} ${colors.green}`._,._,\'${colors.reset} - ${colors.purple} ${workflow.manifest.name} v${workflow.manifest.version}${colors.reset} + ${colors.purple} ${workflow.manifest.name} ${workflow_version}${colors.reset} ${dashedLine(monochrome_logs)} """.stripIndent() ) diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy index 23d4247f..d76b3675 100755 --- a/lib/WorkflowMain.groovy +++ b/lib/WorkflowMain.groovy @@ -19,7 +19,7 @@ class WorkflowMain { } // - // Print help to screen if required + // Generate help string // public static String help(workflow, params, log) { def command = "nextflow run ${workflow.manifest.name} --input samplesheet.csv --genome GRCh37 -profile docker" @@ -32,7 +32,7 @@ class WorkflowMain { } // - // Print parameter summary log to screen + // Generate parameter summary log string // public static String paramsSummaryLog(workflow, params, log) { def summary_log = '' @@ -53,15 +53,21 @@ class WorkflowMain { System.exit(0) } - // Validate workflow parameters via the JSON schema - if (params.validate_params) { - NfcoreSchema.validateParameters(workflow, params, log) + // Print workflow version and exit on --version + if (params.version) { + String workflow_version = NfcoreTemplate.version(workflow) + log.info "${workflow.manifest.name} ${workflow_version}" + System.exit(0) } // Print parameter summary log to screen - log.info paramsSummaryLog(workflow, params, log) + // Validate workflow parameters via the JSON schema + if (params.validate_params) { + NfcoreSchema.validateParameters(workflow, params, log) + } + // Check that a -profile or Nextflow config has been provided to run the pipeline NfcoreTemplate.checkConfigProvided(workflow, log) diff --git a/modules.json b/modules.json index 0ea8ef88..dc96d1e1 100644 --- a/modules.json +++ b/modules.json @@ -7,15 +7,18 @@ "nf-core": { "custom/dumpsoftwareversions": { "branch": "master", - "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905" + "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", + "installed_by": ["modules"] }, "fastqc": { "branch": "master", - "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905" + "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", + "installed_by": ["modules"] }, "multiqc": { "branch": "master", - 
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905" + "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", + "installed_by": ["modules"] } } } diff --git a/modules/local/samplesheet_check.nf b/modules/local/samplesheet_check.nf index 3f114f13..d95a15d3 100644 --- a/modules/local/samplesheet_check.nf +++ b/modules/local/samplesheet_check.nf @@ -1,5 +1,6 @@ process SAMPLESHEET_CHECK { tag "$samplesheet" + label 'process_single' conda (params.enable_conda ? "conda-forge::python=3.8.3" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? @@ -13,6 +14,9 @@ process SAMPLESHEET_CHECK { path '*.csv' , emit: csv path "versions.yml", emit: versions + when: + task.ext.when == null || task.ext.when + script: // This script is bundled with the pipeline, in nf-core/fetchngs/bin/ """ check_samplesheet.py \\ diff --git a/nextflow.config b/nextflow.config index 349fd4c6..33ff0e76 100644 --- a/nextflow.config +++ b/nextflow.config @@ -35,6 +35,7 @@ params { monochrome_logs = false hook_url = null help = false + version = false validate_params = true show_hidden_params = false schema_ignore_params = 'genomes' @@ -81,6 +82,7 @@ profiles { debug { process.beforeScript = 'echo $HOSTNAME' } conda { params.enable_conda = true + conda.enabled = true docker.enabled = false singularity.enabled = false podman.enabled = false @@ -89,6 +91,7 @@ profiles { } mamba { params.enable_conda = true + conda.enabled = true conda.useMamba = true docker.enabled = false singularity.enabled = false @@ -104,6 +107,9 @@ profiles { shifter.enabled = false charliecloud.enabled = false } + arm { + docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64' + } singularity { singularity.enabled = true singularity.autoMounts = true @@ -185,12 +191,12 @@ dag { manifest { name = 'nf-core/fetchngs' - author = 'Harshil Patel, Moritz E. Beber and Jose Espinosa-Carrasco' + author = """Harshil Patel, Moritz E. Beber and Jose Espinosa-Carrasco""" homePage = 'https://github.com/nf-core/fetchngs' - description = 'Pipeline to fetch metadata and raw FastQ files from public databases' + description = """Pipeline to fetch metadata and raw FastQ files from public databases""" mainScript = 'main.nf' - nextflowVersion = '!>=21.10.3' - version = '1.8dev' + nextflowVersion = '!>=22.10.1' + version = '1.9dev' doi = '' } diff --git a/nextflow_schema.json b/nextflow_schema.json index 14b336cd..86023363 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -176,6 +176,12 @@ "fa_icon": "fas fa-question-circle", "hidden": true }, + "version": { + "type": "boolean", + "description": "Display version and exit.", + "fa_icon": "fas fa-question-circle", + "hidden": true + }, "publish_dir_mode": { "type": "string", "default": "copy", @@ -217,7 +223,7 @@ "type": "string", "description": "Incoming hook URL for messaging service", "fa_icon": "fas fa-people-group", - "help_text": "Incoming hook URL for messaging service. Currently, only MS Teams is supported.", + "help_text": "Incoming hook URL for messaging service. 
Currently, MS Teams and Slack are supported.", "hidden": true }, "multiqc_config": { diff --git a/workflows/fetchngs.nf b/workflows/fetchngs.nf index 057b6ad6..9af8723d 100644 --- a/workflows/fetchngs.nf +++ b/workflows/fetchngs.nf @@ -82,7 +82,7 @@ workflow FETCHNGS { ch_versions = ch_versions.mix(FASTQC.out.versions.first()) CUSTOM_DUMPSOFTWAREVERSIONS ( - ch_versions.unique().collectFile(name: 'collated_versions.yml') + ch_versions.unique{ it.text }.collectFile(name: 'collated_versions.yml') ) // @@ -102,12 +102,11 @@ workflow FETCHNGS { MULTIQC ( ch_multiqc_files.collect(), - ch_multiqc_config.collect().ifEmpty([]), - ch_multiqc_custom_config.collect().ifEmpty([]), - ch_multiqc_logo.collect().ifEmpty([]) + ch_multiqc_config.toList(), + ch_multiqc_custom_config.toList(), + ch_multiqc_logo.toList() ) multiqc_report = MULTIQC.out.report.toList() - ch_versions = ch_versions.mix(MULTIQC.out.versions) } /* @@ -122,7 +121,7 @@ workflow.onComplete { } NfcoreTemplate.summary(workflow, params, log) if (params.hook_url) { - NfcoreTemplate.adaptivecard(workflow, params, summary_params, projectDir, log) + NfcoreTemplate.IM_notification(workflow, params, summary_params, projectDir, log) } } From d5e40d229ff308858d3a34abdbe3ffc6f7123b67 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Mon, 19 Dec 2022 12:04:52 +0000 Subject: [PATCH 3/8] Template update for nf-core/tools version 2.7.2 --- .github/workflows/fix-linting.yml | 4 +- .github/workflows/linting_comment.yml | 2 +- lib/WorkflowMain.groovy | 2 +- modules.json | 6 +- modules/local/samplesheet_check.nf | 2 +- .../custom/dumpsoftwareversions/main.nf | 2 +- .../templates/dumpsoftwareversions.py | 99 ++++++++++--------- modules/nf-core/fastqc/main.nf | 40 +++----- modules/nf-core/multiqc/main.nf | 2 +- nextflow.config | 3 - nextflow_schema.json | 6 -- workflows/fetchngs.nf | 2 +- 12 files changed, 82 insertions(+), 88 deletions(-) mode change 100644 => 100755 modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml index 55000527..6e772fce 100644 --- a/.github/workflows/fix-linting.yml +++ b/.github/workflows/fix-linting.yml @@ -34,9 +34,9 @@ jobs: id: prettier_status run: | if prettier --check ${GITHUB_WORKSPACE}; then - echo "name=result::pass" >> $GITHUB_OUTPUT + echo "result=pass" >> $GITHUB_OUTPUT else - echo "name=result::fail" >> $GITHUB_OUTPUT + echo "result=fail" >> $GITHUB_OUTPUT fi - name: Run 'prettier --write' diff --git a/.github/workflows/linting_comment.yml b/.github/workflows/linting_comment.yml index 39635186..0bbcd30f 100644 --- a/.github/workflows/linting_comment.yml +++ b/.github/workflows/linting_comment.yml @@ -18,7 +18,7 @@ jobs: - name: Get PR number id: pr_number - run: echo "name=pr_number::$(cat linting-logs/PR_number.txt)" >> $GITHUB_OUTPUT + run: echo "pr_number=$(cat linting-logs/PR_number.txt)" >> $GITHUB_OUTPUT - name: Post PR comment uses: marocchino/sticky-pull-request-comment@v2 diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy index d76b3675..30fcedde 100755 --- a/lib/WorkflowMain.groovy +++ b/lib/WorkflowMain.groovy @@ -72,7 +72,7 @@ class WorkflowMain { NfcoreTemplate.checkConfigProvided(workflow, log) // Check that conda channels are set-up correctly - if (params.enable_conda) { + if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { Utils.checkCondaChannels(log) } diff --git a/modules.json b/modules.json index dc96d1e1..a306549a 100644 --- a/modules.json +++ 
b/modules.json @@ -7,17 +7,17 @@ "nf-core": { "custom/dumpsoftwareversions": { "branch": "master", - "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", + "git_sha": "c8e35eb2055c099720a75538d1b8adb3fb5a464c", "installed_by": ["modules"] }, "fastqc": { "branch": "master", - "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", + "git_sha": "c8e35eb2055c099720a75538d1b8adb3fb5a464c", "installed_by": ["modules"] }, "multiqc": { "branch": "master", - "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", + "git_sha": "c8e35eb2055c099720a75538d1b8adb3fb5a464c", "installed_by": ["modules"] } } diff --git a/modules/local/samplesheet_check.nf b/modules/local/samplesheet_check.nf index d95a15d3..a0cc023e 100644 --- a/modules/local/samplesheet_check.nf +++ b/modules/local/samplesheet_check.nf @@ -2,7 +2,7 @@ process SAMPLESHEET_CHECK { tag "$samplesheet" label 'process_single' - conda (params.enable_conda ? "conda-forge::python=3.8.3" : null) + conda "conda-forge::python=3.8.3" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/python:3.8.3' : 'quay.io/biocontainers/python:3.8.3' }" diff --git a/modules/nf-core/custom/dumpsoftwareversions/main.nf b/modules/nf-core/custom/dumpsoftwareversions/main.nf index cebb6e05..3df21765 100644 --- a/modules/nf-core/custom/dumpsoftwareversions/main.nf +++ b/modules/nf-core/custom/dumpsoftwareversions/main.nf @@ -2,7 +2,7 @@ process CUSTOM_DUMPSOFTWAREVERSIONS { label 'process_single' // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container - conda (params.enable_conda ? 'bioconda::multiqc=1.13' : null) + conda "bioconda::multiqc=1.13" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/multiqc:1.13--pyhdfd78af_0' : 'quay.io/biocontainers/multiqc:1.13--pyhdfd78af_0' }" diff --git a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py old mode 100644 new mode 100755 index 787bdb7b..e55b8d43 --- a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py +++ b/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py @@ -1,5 +1,9 @@ #!/usr/bin/env python + +"""Provide functions to merge multiple versions.yml files.""" + + import platform from textwrap import dedent @@ -7,6 +11,7 @@ def _make_versions_html(versions): + """Generate a tabular HTML output of all versions for MultiQC.""" html = [ dedent( """\\ @@ -45,47 +50,53 @@ def _make_versions_html(versions): return "\\n".join(html) -versions_this_module = {} -versions_this_module["${task.process}"] = { - "python": platform.python_version(), - "yaml": yaml.__version__, -} - -with open("$versions") as f: - versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module - -# aggregate versions by the module name (derived from fully-qualified process name) -versions_by_module = {} -for process, process_versions in versions_by_process.items(): - module = process.split(":")[-1] - try: - if versions_by_module[module] != process_versions: - raise AssertionError( - "We assume that software versions are the same between all modules. " - "If you see this error-message it means you discovered an edge-case " - "and should open an issue in nf-core/tools. 
" - ) - except KeyError: - versions_by_module[module] = process_versions - -versions_by_module["Workflow"] = { - "Nextflow": "$workflow.nextflow.version", - "$workflow.manifest.name": "$workflow.manifest.version", -} - -versions_mqc = { - "id": "software_versions", - "section_name": "${workflow.manifest.name} Software Versions", - "section_href": "https://github.com/${workflow.manifest.name}", - "plot_type": "html", - "description": "are collected at run time from the software output.", - "data": _make_versions_html(versions_by_module), -} - -with open("software_versions.yml", "w") as f: - yaml.dump(versions_by_module, f, default_flow_style=False) -with open("software_versions_mqc.yml", "w") as f: - yaml.dump(versions_mqc, f, default_flow_style=False) - -with open("versions.yml", "w") as f: - yaml.dump(versions_this_module, f, default_flow_style=False) +def main(): + """Load all version files and generate merged output.""" + versions_this_module = {} + versions_this_module["${task.process}"] = { + "python": platform.python_version(), + "yaml": yaml.__version__, + } + + with open("$versions") as f: + versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module + + # aggregate versions by the module name (derived from fully-qualified process name) + versions_by_module = {} + for process, process_versions in versions_by_process.items(): + module = process.split(":")[-1] + try: + if versions_by_module[module] != process_versions: + raise AssertionError( + "We assume that software versions are the same between all modules. " + "If you see this error-message it means you discovered an edge-case " + "and should open an issue in nf-core/tools. " + ) + except KeyError: + versions_by_module[module] = process_versions + + versions_by_module["Workflow"] = { + "Nextflow": "$workflow.nextflow.version", + "$workflow.manifest.name": "$workflow.manifest.version", + } + + versions_mqc = { + "id": "software_versions", + "section_name": "${workflow.manifest.name} Software Versions", + "section_href": "https://github.com/${workflow.manifest.name}", + "plot_type": "html", + "description": "are collected at run time from the software output.", + "data": _make_versions_html(versions_by_module), + } + + with open("software_versions.yml", "w") as f: + yaml.dump(versions_by_module, f, default_flow_style=False) + with open("software_versions_mqc.yml", "w") as f: + yaml.dump(versions_mqc, f, default_flow_style=False) + + with open("versions.yml", "w") as f: + yaml.dump(versions_this_module, f, default_flow_style=False) + + +if __name__ == "__main__": + main() diff --git a/modules/nf-core/fastqc/main.nf b/modules/nf-core/fastqc/main.nf index 05730368..9ae58381 100644 --- a/modules/nf-core/fastqc/main.nf +++ b/modules/nf-core/fastqc/main.nf @@ -2,7 +2,7 @@ process FASTQC { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::fastqc=0.11.9" : null) + conda "bioconda::fastqc=0.11.9" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' : 'quay.io/biocontainers/fastqc:0.11.9--0' }" @@ -20,30 +20,22 @@ process FASTQC { script: def args = task.ext.args ?: '' - // Add soft-links to original FastQs for consistent naming in pipeline def prefix = task.ext.prefix ?: "${meta.id}" - if (meta.single_end) { - """ - [ ! 
-f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz - fastqc $args --threads $task.cpus ${prefix}.fastq.gz - - cat <<-END_VERSIONS > versions.yml - "${task.process}": - fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) - END_VERSIONS - """ - } else { - """ - [ ! -f ${prefix}_1.fastq.gz ] && ln -s ${reads[0]} ${prefix}_1.fastq.gz - [ ! -f ${prefix}_2.fastq.gz ] && ln -s ${reads[1]} ${prefix}_2.fastq.gz - fastqc $args --threads $task.cpus ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz - - cat <<-END_VERSIONS > versions.yml - "${task.process}": - fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) - END_VERSIONS - """ - } + // Make list of old name and new name pairs to use for renaming in the bash while loop + def old_new_pairs = reads instanceof Path || reads.size() == 1 ? [[ reads, "${prefix}.${reads.extension}" ]] : reads.withIndex().collect { entry, index -> [ entry, "${prefix}_${index + 1}.${entry.extension}" ] } + def rename_to = old_new_pairs*.join(' ').join(' ') + def renamed_files = old_new_pairs.collect{ old_name, new_name -> new_name }.join(' ') + """ + printf "%s %s\\n" $rename_to | while read old_name new_name; do + [ -f "\${new_name}" ] || ln -s \$old_name \$new_name + done + fastqc $args --threads $task.cpus $renamed_files + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) + END_VERSIONS + """ stub: def prefix = task.ext.prefix ?: "${meta.id}" diff --git a/modules/nf-core/multiqc/main.nf b/modules/nf-core/multiqc/main.nf index a8159a57..68f66bea 100644 --- a/modules/nf-core/multiqc/main.nf +++ b/modules/nf-core/multiqc/main.nf @@ -1,7 +1,7 @@ process MULTIQC { label 'process_single' - conda (params.enable_conda ? 'bioconda::multiqc=1.13' : null) + conda "bioconda::multiqc=1.13" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/multiqc:1.13--pyhdfd78af_0' : 'quay.io/biocontainers/multiqc:1.13--pyhdfd78af_0' }" diff --git a/nextflow.config b/nextflow.config index 33ff0e76..5a863a50 100644 --- a/nextflow.config +++ b/nextflow.config @@ -39,7 +39,6 @@ params { validate_params = true show_hidden_params = false schema_ignore_params = 'genomes' - enable_conda = false // Config options @@ -81,7 +80,6 @@ try { profiles { debug { process.beforeScript = 'echo $HOSTNAME' } conda { - params.enable_conda = true conda.enabled = true docker.enabled = false singularity.enabled = false @@ -90,7 +88,6 @@ profiles { charliecloud.enabled = false } mamba { - params.enable_conda = true conda.enabled = true conda.useMamba = true docker.enabled = false diff --git a/nextflow_schema.json b/nextflow_schema.json index 86023363..b0720ed5 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -263,12 +263,6 @@ "description": "Show all params when using `--help`", "hidden": true, "help_text": "By default, parameters set as _hidden_ in the schema are not shown on the command line when a user runs with `--help`. Specifying this option will tell the pipeline to show all parameters." - }, - "enable_conda": { - "type": "boolean", - "description": "Run this workflow with Conda. 
You can also use '-profile conda' instead of providing this parameter.", - "hidden": true, - "fa_icon": "fas fa-bacon" } } } diff --git a/workflows/fetchngs.nf b/workflows/fetchngs.nf index 9af8723d..4cb46a27 100644 --- a/workflows/fetchngs.nf +++ b/workflows/fetchngs.nf @@ -82,7 +82,7 @@ workflow FETCHNGS { ch_versions = ch_versions.mix(FASTQC.out.versions.first()) CUSTOM_DUMPSOFTWAREVERSIONS ( - ch_versions.unique{ it.text }.collectFile(name: 'collated_versions.yml') + ch_versions.unique().collectFile(name: 'collated_versions.yml') ) // From afd22a4581d52f513db355e4f61b5e1e918e0721 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Tue, 20 Dec 2022 17:58:18 +0000 Subject: [PATCH 4/8] Update all modules and subworkflows --- modules.json | 10 ++++++---- modules/local/multiqc_mappings_config.nf | 2 +- modules/local/sra_fastq_ftp.nf | 2 +- modules/local/sra_ids_to_runinfo.nf | 2 +- modules/local/sra_merge_samplesheet.nf | 2 +- modules/local/sra_runinfo_to_ftp.nf | 2 +- modules/local/synapse_get.nf | 2 +- modules/local/synapse_list.nf | 2 +- modules/local/synapse_merge_samplesheet.nf | 2 +- modules/local/synapse_show.nf | 2 +- .../templates/dumpsoftwareversions.py | 3 +-- modules/nf-core/custom/sratoolsncbisettings/main.nf | 2 +- modules/nf-core/sratools/fasterqdump/main.nf | 2 +- modules/nf-core/sratools/prefetch/main.nf | 2 +- 14 files changed, 19 insertions(+), 18 deletions(-) diff --git a/modules.json b/modules.json index e125fc48..c8cd187a 100644 --- a/modules.json +++ b/modules.json @@ -13,16 +13,17 @@ "custom/sratoolsncbisettings": { "branch": "master", "git_sha": "c8e35eb2055c099720a75538d1b8adb3fb5a464c", - "installed_by": ["modules"] + "installed_by": ["fastq_download_prefetch_fasterqdump_sratools"] }, "sratools/fasterqdump": { "branch": "master", - "git_sha": "03711bcb7fa2a7088eb54abb1fca326d30e602c2" + "git_sha": "c8e35eb2055c099720a75538d1b8adb3fb5a464c", + "installed_by": ["fastq_download_prefetch_fasterqdump_sratools"] }, "sratools/prefetch": { "branch": "master", "git_sha": "c8e35eb2055c099720a75538d1b8adb3fb5a464c", - "installed_by": ["modules"] + "installed_by": ["fastq_download_prefetch_fasterqdump_sratools"] } } }, @@ -30,7 +31,8 @@ "nf-core": { "fastq_download_prefetch_fasterqdump_sratools": { "branch": "master", - "git_sha": "03711bcb7fa2a7088eb54abb1fca326d30e602c2" + "git_sha": "03711bcb7fa2a7088eb54abb1fca326d30e602c2", + "installed_by": ["subworkflows"] } } } diff --git a/modules/local/multiqc_mappings_config.nf b/modules/local/multiqc_mappings_config.nf index 31f06dae..4f6c95bd 100644 --- a/modules/local/multiqc_mappings_config.nf +++ b/modules/local/multiqc_mappings_config.nf @@ -1,7 +1,7 @@ process MULTIQC_MAPPINGS_CONFIG { - conda (params.enable_conda ? "conda-forge::python=3.9.5" : null) + conda "conda-forge::python=3.9.5" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/python:3.9--1' : 'quay.io/biocontainers/python:3.9--1' }" diff --git a/modules/local/sra_fastq_ftp.nf b/modules/local/sra_fastq_ftp.nf index 5e4f6a05..464a327e 100644 --- a/modules/local/sra_fastq_ftp.nf +++ b/modules/local/sra_fastq_ftp.nf @@ -4,7 +4,7 @@ process SRA_FASTQ_FTP { label 'process_low' label 'error_retry' - conda (params.enable_conda ? 'bioconda::sra-tools=2.11.0' : null) + conda "bioconda::sra-tools=2.11.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/sra-tools:2.11.0--pl5321ha49a11a_3' : 'quay.io/biocontainers/sra-tools:2.11.0--pl5321ha49a11a_3' }" diff --git a/modules/local/sra_ids_to_runinfo.nf b/modules/local/sra_ids_to_runinfo.nf index 57fdb10e..49c83554 100644 --- a/modules/local/sra_ids_to_runinfo.nf +++ b/modules/local/sra_ids_to_runinfo.nf @@ -3,7 +3,7 @@ process SRA_IDS_TO_RUNINFO { tag "$id" label 'error_retry' - conda (params.enable_conda ? "conda-forge::python=3.9.5" : null) + conda "conda-forge::python=3.9.5" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/python:3.9--1' : 'quay.io/biocontainers/python:3.9--1' }" diff --git a/modules/local/sra_merge_samplesheet.nf b/modules/local/sra_merge_samplesheet.nf index ca9c0068..4b94a823 100644 --- a/modules/local/sra_merge_samplesheet.nf +++ b/modules/local/sra_merge_samplesheet.nf @@ -1,6 +1,6 @@ process SRA_MERGE_SAMPLESHEET { - conda (params.enable_conda ? "conda-forge::sed=4.7" : null) + conda "conda-forge::sed=4.7" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/ubuntu:20.04' : 'ubuntu:20.04' }" diff --git a/modules/local/sra_runinfo_to_ftp.nf b/modules/local/sra_runinfo_to_ftp.nf index 13fa6c3e..3a060f7b 100644 --- a/modules/local/sra_runinfo_to_ftp.nf +++ b/modules/local/sra_runinfo_to_ftp.nf @@ -1,7 +1,7 @@ process SRA_RUNINFO_TO_FTP { - conda (params.enable_conda ? "conda-forge::python=3.9.5" : null) + conda "conda-forge::python=3.9.5" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/python:3.9--1' : 'quay.io/biocontainers/python:3.9--1' }" diff --git a/modules/local/synapse_get.nf b/modules/local/synapse_get.nf index b494bafd..f43e4d5e 100644 --- a/modules/local/synapse_get.nf +++ b/modules/local/synapse_get.nf @@ -4,7 +4,7 @@ process SYNAPSE_GET { label 'process_low' label 'error_retry' - conda (params.enable_conda ? "bioconda::synapseclient=2.6.0" : null) + conda "bioconda::synapseclient=2.6.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/synapseclient:2.6.0--pyh5e36f6f_0' : 'quay.io/biocontainers/synapseclient:2.6.0--pyh5e36f6f_0' }" diff --git a/modules/local/synapse_list.nf b/modules/local/synapse_list.nf index c3861bff..f42357ab 100644 --- a/modules/local/synapse_list.nf +++ b/modules/local/synapse_list.nf @@ -3,7 +3,7 @@ process SYNAPSE_LIST { tag "$id" label 'process_low' - conda (params.enable_conda ? "bioconda::synapseclient=2.6.0" : null) + conda "bioconda::synapseclient=2.6.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/synapseclient:2.6.0--pyh5e36f6f_0' : 'quay.io/biocontainers/synapseclient:2.6.0--pyh5e36f6f_0' }" diff --git a/modules/local/synapse_merge_samplesheet.nf b/modules/local/synapse_merge_samplesheet.nf index 68bfd68b..f46a1fbf 100644 --- a/modules/local/synapse_merge_samplesheet.nf +++ b/modules/local/synapse_merge_samplesheet.nf @@ -1,7 +1,7 @@ process SYNAPSE_MERGE_SAMPLESHEET { - conda (params.enable_conda ? "conda-forge::sed=4.7" : null) + conda "conda-forge::sed=4.7" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/ubuntu:20.04' : 'ubuntu:20.04' }" diff --git a/modules/local/synapse_show.nf b/modules/local/synapse_show.nf index a7a12767..0bd6cc12 100644 --- a/modules/local/synapse_show.nf +++ b/modules/local/synapse_show.nf @@ -3,7 +3,7 @@ process SYNAPSE_SHOW { tag "$id" label 'process_low' - conda (params.enable_conda ? "bioconda::synapseclient=2.6.0" : null) + conda "bioconda::synapseclient=2.6.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/synapseclient:2.6.0--pyh5e36f6f_0' : 'quay.io/biocontainers/synapseclient:2.6.0--pyh5e36f6f_0' }" diff --git a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py index e55b8d43..da033408 100755 --- a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py +++ b/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py @@ -4,11 +4,10 @@ """Provide functions to merge multiple versions.yml files.""" +import yaml import platform from textwrap import dedent -import yaml - def _make_versions_html(versions): """Generate a tabular HTML output of all versions for MultiQC.""" diff --git a/modules/nf-core/custom/sratoolsncbisettings/main.nf b/modules/nf-core/custom/sratoolsncbisettings/main.nf index 21bf3005..36be10d3 100644 --- a/modules/nf-core/custom/sratoolsncbisettings/main.nf +++ b/modules/nf-core/custom/sratoolsncbisettings/main.nf @@ -2,7 +2,7 @@ process CUSTOM_SRATOOLSNCBISETTINGS { tag 'ncbi-settings' label 'process_low' - conda (params.enable_conda ? 'bioconda::sra-tools=2.11.0' : null) + conda "bioconda::sra-tools=2.11.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/sra-tools:2.11.0--pl5321ha49a11a_3' : 'quay.io/biocontainers/sra-tools:2.11.0--pl5321ha49a11a_3' }" diff --git a/modules/nf-core/sratools/fasterqdump/main.nf b/modules/nf-core/sratools/fasterqdump/main.nf index 1ffb21f0..ca5ee763 100644 --- a/modules/nf-core/sratools/fasterqdump/main.nf +++ b/modules/nf-core/sratools/fasterqdump/main.nf @@ -2,7 +2,7 @@ process SRATOOLS_FASTERQDUMP { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? 'bioconda::sra-tools=2.11.0 conda-forge::pigz=2.6' : null) + conda "bioconda::sra-tools=2.11.0 conda-forge::pigz=2.6" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' : 'quay.io/biocontainers/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' }" diff --git a/modules/nf-core/sratools/prefetch/main.nf b/modules/nf-core/sratools/prefetch/main.nf index 3426ae12..57e8a3c9 100644 --- a/modules/nf-core/sratools/prefetch/main.nf +++ b/modules/nf-core/sratools/prefetch/main.nf @@ -2,7 +2,7 @@ process SRATOOLS_PREFETCH { tag "$id" label 'process_low' - conda (params.enable_conda ? 'bioconda::sra-tools=2.11.0' : null) + conda "bioconda::sra-tools=2.11.0" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/sra-tools:2.11.0--pl5321ha49a11a_3' : 'quay.io/biocontainers/sra-tools:2.11.0--pl5321ha49a11a_3' }" From e4a30d2a4c0160f70e2b5f6e1bf9fc93e703ff66 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Tue, 20 Dec 2022 20:01:33 +0000 Subject: [PATCH 5/8] Add --nf_core_rnaseq_strandedness and support for nf-core/atacseq --- CHANGELOG.md | 3 ++ README.md | 3 +- docs/usage.md | 5 +- modules/local/sra_to_samplesheet.nf | 5 +- modules/local/synapse_to_samplesheet.nf | 7 ++- nextflow.config | 61 +++++++++++++------------ nextflow_schema.json | 35 ++++++++++++-- workflows/sra.nf | 1 + workflows/synapse.nf | 3 +- 9 files changed, 83 insertions(+), 40 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c80fbebf..04968c93 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,7 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Enhancements & fixes +- Bumped minimum Nextflow version from `21.10.3` -> `22.10.1` - Updated pipeline template to [nf-core/tools 2.7.2](https://github.com/nf-core/tools/releases/tag/2.7.2) +- Added support for generating nf-core/atacseq compatible samplesheets +- Added `--nf_core_rnaseq_strandedness` parameter to specify value for `strandedness` entry added to samplesheet created when using `--nf_core_pipeline rnaseq`. The default is `auto` which can be used with nf-core/rnaseq v3.10 onwards to auto-detect strandedness during the pipeline execution. ## [[1.8](https://github.com/nf-core/fetchngs/releases/tag/1.8)] - 2022-11-08 diff --git a/README.md b/README.md index 161d0218..fdc60a60 100644 --- a/README.md +++ b/README.md @@ -55,10 +55,11 @@ This downloads a text file called `SRR_Acc_List.txt` that can be directly provid The columns in the auto-created samplesheet can be tailored to be accepted out-of-the-box by selected nf-core pipelines, these currently include: - [nf-core/rnaseq](https://nf-co.re/rnaseq/usage#samplesheet-input) +- [nf-core/atacseq](https://nf-co.re/atacseq/usage#samplesheet-input) - Ilumina processing mode of [nf-core/viralrecon](https://nf-co.re/viralrecon/usage#illumina-samplesheet-format) - [nf-core/taxprofiler](https://nf-co.re/nf-core/taxprofiler) -You can use the `--nf_core_pipeline` parameter to customise this behaviour e.g. `--nf_core_pipeline rnaseq`. More pipelines will be supported in due course as we adopt and standardise samplesheet input across nf-core. +See [usage docs](https://nf-co.re/fetchngs/1.8/usage#samplesheet-format) for more details. ## Quick Start diff --git a/docs/usage.md b/docs/usage.md index f78879d4..9172b403 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -58,10 +58,13 @@ The final sample information for the FastQ files used for samplesheet generation As a bonus, the columns in the auto-created samplesheet can be tailored to be accepted out-of-the-box by selected nf-core pipelines, these currently include: - [nf-core/rnaseq](https://nf-co.re/rnaseq/usage#samplesheet-input) +- [nf-core/atacseq](https://nf-co.re/atacseq/usage#samplesheet-input) - Ilumina processing mode of [nf-core/viralrecon](https://nf-co.re/viralrecon/usage#illumina-samplesheet-format) - [nf-core/taxprofiler](https://nf-co.re/nf-core/taxprofiler) -You can use the `--nf_core_pipeline` parameter to customise this behaviour e.g. `--nf_core_pipeline rnaseq`. More pipelines will be supported in due course as we adopt and standardise samplesheet input across nf-core. 
It is highly recommended that you double-check that all of the identifiers you defined using `--input` are represented in the samplesheet. Also, public databases don't reliably hold information such as strandedness information so you may need to amend these entries too if for example your samplesheet was created by providing `--nf_core_pipeline rnaseq`. +You can use the `--nf_core_pipeline` parameter to customise this behaviour e.g. `--nf_core_pipeline rnaseq`. More pipelines will be supported in due course as we adopt and standardise samplesheet input across nf-core. It is highly recommended that you double-check that all of the identifiers required by the downstream nf-core pipeline are accurately represented in the samplesheet. For example, the nf-core/atacseq pipeline requires a `replicate` column to be provided in its input samplesheet; however, public databases don't reliably hold information regarding replicates so you may need to amend these entries if your samplesheet was created by providing `--nf_core_pipeline atacseq`. + +From v1.9 of this pipeline, the default `strandedness` in the output samplesheet will be set to `auto` when using `--nf_core_pipeline rnaseq`. This will only work with v3.10 onwards of nf-core/rnaseq, which permits the auto-detection of strandedness during the pipeline execution. You can change this behaviour with the `--nf_core_rnaseq_strandedness` parameter, which is set to `auto` by default. ### Bypass `FTP` data download diff --git a/modules/local/sra_to_samplesheet.nf b/modules/local/sra_to_samplesheet.nf index 6dce588b..92edf5df 100644 --- a/modules/local/sra_to_samplesheet.nf +++ b/modules/local/sra_to_samplesheet.nf @@ -8,6 +8,7 @@ process SRA_TO_SAMPLESHEET { input: val meta val pipeline + val strandedness val mapping_fields output: @@ -38,7 +39,9 @@ // Add nf-core pipeline specific entries if (pipeline) { if (pipeline == 'rnaseq') { - pipeline_map << [ strandedness: 'unstranded' ] + pipeline_map << [ strandedness: strandedness ] + } else if (pipeline == 'atacseq') { + pipeline_map << [ replicate: 1 ] } else if (pipeline == 'taxprofiler') { pipeline_map << [ fasta: '' ] } diff --git a/modules/local/synapse_to_samplesheet.nf b/modules/local/synapse_to_samplesheet.nf index 2e7945ed..393203de 100644 --- a/modules/local/synapse_to_samplesheet.nf +++ b/modules/local/synapse_to_samplesheet.nf @@ -8,6 +8,7 @@ process SYNAPSE_TO_SAMPLESHEET { input: tuple val(meta), path(fastq) val pipeline + val strandedness output: tuple val(meta), path("*.csv"), emit: samplesheet @@ -35,7 +36,11 @@ // Add nf-core pipeline specific entries if (pipeline) { if (pipeline == 'rnaseq') { - pipeline_map << [ strandedness: 'unstranded' ] + pipeline_map << [ strandedness: strandedness ] + } else if (pipeline == 'atacseq') { + pipeline_map << [ replicate: 1 ] + } else if (pipeline == 'taxprofiler') { + pipeline_map << [ fasta: '' ] } } pipeline_map << meta_map diff --git a/nextflow.config b/nextflow.config index 0dfff0e4..c212bcd3 100644 --- a/nextflow.config +++ b/nextflow.config @@ -10,43 +10,44 @@ params { // Input options - input = null - input_type = 'sra' - nf_core_pipeline = null - ena_metadata_fields = null - sample_mapping_fields = 'experiment_accession,run_accession,sample_accession,experiment_alias,run_alias,sample_alias,experiment_title,sample_title,sample_description,description' - synapse_config = null - force_sratools_download = false - skip_fastq_download = false + input = null + input_type = 'sra' + 
nf_core_pipeline = null + nf_core_rnaseq_strandedness = 'auto' + ena_metadata_fields = null + sample_mapping_fields = 'experiment_accession,run_accession,sample_accession,experiment_alias,run_alias,sample_alias,experiment_title,sample_title,sample_description,description' + synapse_config = null + force_sratools_download = false + skip_fastq_download = false // Boilerplate options - outdir = null - tracedir = "${params.outdir}/pipeline_info" - publish_dir_mode = 'copy' - email = null - email_on_fail = null - plaintext_email = false - monochrome_logs = false - hook_url = null - help = false - version = false - validate_params = true - show_hidden_params = false - schema_ignore_params = 'genomes,igenomes_base' + outdir = null + tracedir = "${params.outdir}/pipeline_info" + publish_dir_mode = 'copy' + email = null + email_on_fail = null + plaintext_email = false + monochrome_logs = false + hook_url = null + help = false + version = false + validate_params = true + show_hidden_params = false + schema_ignore_params = 'genomes,igenomes_base' // Config options - custom_config_version = 'master' - custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}" - config_profile_description = null - config_profile_contact = null - config_profile_url = null - config_profile_name = null + custom_config_version = 'master' + custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}" + config_profile_description = null + config_profile_contact = null + config_profile_url = null + config_profile_name = null // Max resource options // Defaults only, expecting to be overwritten - max_memory = '128.GB' - max_cpus = 16 - max_time = '240.h' + max_memory = '128.GB' + max_cpus = 16 + max_time = '240.h' } diff --git a/nextflow_schema.json b/nextflow_schema.json index c7e223bc..278a58a1 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -10,7 +10,10 @@ "type": "object", "fa_icon": "fas fa-terminal", "description": "Define where the pipeline should find input data and save output data.", - "required": ["input", "outdir"], + "required": [ + "input", + "outdir" + ], "properties": { "input": { "type": "string", @@ -26,7 +29,10 @@ "default": "sra", "description": "Specifies the type of identifier provided via `--input` - available options are 'sra' and 'synapse'.", "fa_icon": "fas fa-keyboard", - "enum": ["sra", "synapse"] + "enum": [ + "sra", + "synapse" + ] }, "ena_metadata_fields": { "type": "string", @@ -43,8 +49,20 @@ "nf_core_pipeline": { "type": "string", "fa_icon": "fab fa-apple", - "description": "Name of supported nf-core pipeline e.g. 'rnaseq'. A samplesheet for direct use with the pipeline will be created with the appropriate columns.", - "enum": ["rnaseq", "viralrecon", "taxprofiler"] + "description": "Name of supported nf-core pipeline e.g. 'rnaseq'. 
A samplesheet for direct use with the pipeline will be created with the appropriate columns.", + "enum": [ + "rnaseq", + "atacseq", + "viralrecon", + "taxprofiler" + ] + }, + "nf_core_rnaseq_strandedness": { + "type": "string", + "fa_icon": "fas fa-car", + "description": "Value for 'strandedness' entry added to samplesheet created when using '--nf_core_pipeline rnaseq'.", + "help_text": "The default is 'auto' which can be used with nf-core/rnaseq v3.10 onwards to auto-detect strandedness during the pipeline execution.", + "default": "auto" }, "force_sratools_download": { "type": "boolean", @@ -186,7 +204,14 @@ "description": "Method used to save pipeline results to output directory.", "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.", "fa_icon": "fas fa-copy", - "enum": ["symlink", "rellink", "link", "copy", "copyNoFollow", "move"], + "enum": [ + "symlink", + "rellink", + "link", + "copy", + "copyNoFollow", + "move" + ], "hidden": true }, "email_on_fail": { diff --git a/workflows/sra.nf b/workflows/sra.nf index 9f38e7f8..1085598b 100644 --- a/workflows/sra.nf +++ b/workflows/sra.nf @@ -138,6 +138,7 @@ workflow SRA { SRA_TO_SAMPLESHEET ( ch_sra_metadata, params.nf_core_pipeline ?: '', + params.nf_core_rnaseq_strandedness ?: 'auto', params.sample_mapping_fields ) diff --git a/workflows/synapse.nf b/workflows/synapse.nf index 1203056a..f2138a60 100644 --- a/workflows/synapse.nf +++ b/workflows/synapse.nf @@ -115,7 +115,8 @@ workflow SYNAPSE { // SYNAPSE_TO_SAMPLESHEET ( ch_fastq, - params.nf_core_pipeline ?: '' + params.nf_core_pipeline ?: '', + params.nf_core_rnaseq_strandedness ?: 'auto' ) // From b50e1f64728f51a7a1d12c6f6ce2a00db8bef99f Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Tue, 20 Dec 2022 20:02:46 +0000 Subject: [PATCH 6/8] Bump pipeline version to 1.9 --- CHANGELOG.md | 2 +- nextflow.config | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 04968c93..37f9ea6d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
-## [Unpublished Version / DEV] +## [[1.9](https://github.com/nf-core/rnaseq/releases/tag/1.9)] - 2022-12-21 ### Enhancements & fixes diff --git a/nextflow.config b/nextflow.config index c212bcd3..d6686ae4 100644 --- a/nextflow.config +++ b/nextflow.config @@ -177,7 +177,7 @@ manifest { description = """Pipeline to fetch metadata and raw FastQ files from public databases""" mainScript = 'main.nf' nextflowVersion = '!>=22.10.1' - version = '1.9dev' + version = '1.9' doi = 'https://doi.org/10.5281/zenodo.5070524' } From 4f84832800e7c8ca1819d47efbddff8d7248e682 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Tue, 20 Dec 2022 21:52:58 +0000 Subject: [PATCH 7/8] Fix prettier --- nextflow_schema.json | 26 ++++---------------------- 1 file changed, 4 insertions(+), 22 deletions(-) diff --git a/nextflow_schema.json b/nextflow_schema.json index 278a58a1..696914e4 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -10,10 +10,7 @@ "type": "object", "fa_icon": "fas fa-terminal", "description": "Define where the pipeline should find input data and save output data.", - "required": [ - "input", - "outdir" - ], + "required": ["input", "outdir"], "properties": { "input": { "type": "string", @@ -29,10 +26,7 @@ "default": "sra", "description": "Specifies the type of identifier provided via `--input` - available options are 'sra' and 'synapse'.", "fa_icon": "fas fa-keyboard", - "enum": [ - "sra", - "synapse" - ] + "enum": ["sra", "synapse"] }, "ena_metadata_fields": { "type": "string", @@ -50,12 +44,7 @@ "type": "string", "fa_icon": "fab fa-apple", "description": "Name of supported nf-core pipeline e.g. 'rnaseq'. A samplesheet for direct use with the pipeline will be created with the appropriate columns.", - "enum": [ - "rnaseq", - "atacseq", - "viralrecon", - "taxprofiler" - ] + "enum": ["rnaseq", "atacseq", "viralrecon", "taxprofiler"] }, "nf_core_rnaseq_strandedness": { "type": "string", @@ -204,14 +193,7 @@ "description": "Method used to save pipeline results to output directory.", "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.", "fa_icon": "fas fa-copy", - "enum": [ - "symlink", - "rellink", - "link", - "copy", - "copyNoFollow", - "move" - ], + "enum": ["symlink", "rellink", "link", "copy", "copyNoFollow", "move"], "hidden": true }, "email_on_fail": { From 4f516556abd9af888eb9e747d16c39ed451c1153 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Wed, 21 Dec 2022 12:15:14 +0000 Subject: [PATCH 8/8] Update CHANGELOG.md --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 37f9ea6d..54242118 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## [[1.9](https://github.com/nf-core/rnaseq/releases/tag/1.9)] - 2022-12-21 +## [[1.9](https://github.com/nf-core/fetchngs/releases/tag/1.9)] - 2022-12-21 ### Enhancements & fixes
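
For reference, the per-pipeline samplesheet columns introduced in PATCH 5/8 (shared by SRA_TO_SAMPLESHEET and SYNAPSE_TO_SAMPLESHEET) can be summarised as a small standalone Groovy sketch. The map literals mirror the `pipeline_map` fragments in the diffs above; the wrapper function `pipelineColumns` and the example values are illustrative assumptions only, not code taken from the pipeline.

// Minimal sketch (assumed helper, not part of the patch) of the columns appended
// per supported pipeline. 'pipeline' corresponds to params.nf_core_pipeline and
// 'strandedness' to params.nf_core_rnaseq_strandedness (default 'auto' from v1.9).
def pipelineColumns(String pipeline, String strandedness) {
    def pipeline_map = [:]
    if (pipeline == 'rnaseq') {
        pipeline_map << [ strandedness: strandedness ]
    } else if (pipeline == 'atacseq') {
        pipeline_map << [ replicate: 1 ]   // placeholder value; amend real replicates manually
    } else if (pipeline == 'taxprofiler') {
        pipeline_map << [ fasta: '' ]
    }
    return pipeline_map
}

// Expected behaviour with the documented defaults:
assert pipelineColumns('rnaseq', 'auto')     == [ strandedness: 'auto' ]
assert pipelineColumns('atacseq', 'auto')    == [ replicate: 1 ]
assert pipelineColumns('viralrecon', 'auto') == [:]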