diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 566ef509..53480a37 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,28 +1,70 @@ -name: nf-core CI # This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors +name: nf-core CI on: pull_request: release: types: [published] + merge_group: + types: + - checks_requested + branches: + - master + - dev env: NXF_ANSI_LOG: false + NFTEST_VER: "0.8.1" concurrency: group: "${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}" cancel-in-progress: true jobs: + changes: + name: Check for changes + runs-on: ubuntu-latest + outputs: + # Expose matched filters as job 'tags' output variable + tags: ${{ steps.filter.outputs.changes }} + steps: + - uses: actions/checkout@v3 + - name: Combine all tags.yml files + id: get_username + run: find . -name "tags.yml" -not -path "./.github/*" -exec cat {} + > .github/tags.yml + - name: debug + run: cat .github/tags.yml + - uses: dorny/paths-filter@v2 + id: filter + with: + filters: ".github/tags.yml" + + define_nxf_versions: + name: Choose nextflow versions to test against depending on target branch + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.nxf_versions.outputs.matrix }} + steps: + - id: nxf_versions + run: | + if [[ "${{ github.event_name }}" == "pull_request" && "${{ github.base_ref }}" == "dev" && "${{ matrix.NXF_VER }}" != "latest-everything" ]]; then + echo matrix='["latest-everything"]' | tee -a $GITHUB_OUTPUT + else + echo matrix='["latest-everything", "23.04.0"]' | tee -a $GITHUB_OUTPUT + fi + test: - name: Run pipeline with test data - # Only run on push if this is the nf-core dev branch (merged PRs) - if: "${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-core/fetchngs') }}" + name: ${{ matrix.tags }} ${{ matrix.profile }} NF ${{ matrix.NXF_VER }} + needs: [changes, define_nxf_versions] + if: 
needs.changes.outputs.tags != '[]' runs-on: ubuntu-latest strategy: + fail-fast: false matrix: - NXF_VER: - - "23.04.0" - - "latest-everything" + NXF_VER: ${{ fromJson(needs.define_nxf_versions.outputs.matrix) }} + tags: ["${{ fromJson(needs.changes.outputs.tags) }}"] + profile: + - "docker" + steps: - name: Check out pipeline code uses: actions/checkout@v3 @@ -32,29 +74,33 @@ jobs: with: version: "${{ matrix.NXF_VER }}" - - name: Run pipeline with test data - run: | - nextflow run ${GITHUB_WORKSPACE} -profile test,docker --outdir ./results + - name: Cache nf-test installation + id: cache-software + uses: actions/cache@v3 + with: + path: | + /usr/local/bin/nf-test + /home/runner/.nf-test/nf-test.jar + key: ${{ runner.os }}-${{ env.NFTEST_VER }}-nftest - parameters: - name: Test workflow parameters - if: ${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-core/fetchngs') }} - runs-on: ubuntu-latest - strategy: - matrix: - parameters: - - "--nf_core_pipeline rnaseq" - - "--ena_metadata_fields run_accession,experiment_accession,library_layout,fastq_ftp,fastq_md5 --sample_mapping_fields run_accession,library_layout" - - "--skip_fastq_download" - - "--force_sratools_download" - steps: - - name: Check out pipeline code - uses: actions/checkout@v2 + - name: Install nf-test + if: steps.cache-software.outputs.cache-hit != 'true' + run: | + wget -qO- https://code.askimed.com/install/nf-test | bash + sudo mv nf-test /usr/local/bin/ - - name: Install Nextflow + - name: Run nf-test run: | - wget -qO- get.nextflow.io | bash - sudo mv nextflow /usr/local/bin/ - - name: Run pipeline with various parameters + nf-test test --tag ${{ matrix.tags }} --profile "test,${{ matrix.profile }}" --junitxml=test.xml + + - name: Output log on failure + if: failure() run: | - nextflow run ${GITHUB_WORKSPACE} -profile test,docker --outdir ./results ${{ matrix.parameters }} + sudo apt install bat > /dev/null + batcat --decorations=always --color=always ${{ 
github.workspace }}/.nf-test/tests/*/meta/nextflow.log + + - name: Publish Test Report + uses: mikepenz/action-junit-report@v3 + if: always() # always run even if the previous step fails + with: + report_paths: test.xml diff --git a/.gitignore b/.gitignore index 5124c9ac..ef809d7c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,12 @@ +*.pyc +.DS_Store .nextflow* -work/ +.nf-test.log data/ +nf-test +.nf-test* results/ -.DS_Store -testing/ +test.xml testing* -*.pyc +testing/ +work/ diff --git a/.nf-core.yml b/.nf-core.yml index cad7f3e2..0e89cae9 100644 --- a/.nf-core.yml +++ b/.nf-core.yml @@ -4,8 +4,16 @@ lint: files_exist: - .github/workflows/awsfulltest.yml - .github/workflows/awstest.yml + - assets/multiqc_config.yml - conf/igenomes.config + - conf/modules.config + - lib/NfcoreTemplate.groovy + - lib/Utils.groovy - lib/WorkflowFetchngs.groovy + - lib/WorkflowMain.groovy + - lib/nfcore_external_java_deps.jar files_unchanged: + - .gitattributes + - .gitignore - assets/sendmail_template.txt - lib/NfcoreTemplate.groovy diff --git a/CHANGELOG.md b/CHANGELOG.md index 4a33776a..5e484a7a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,31 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+## [[1.11.0](https://github.com/nf-core/fetchngs/releases/tag/1.11.0)] - 2023-10-18 + +### Credits + +Special thanks to the following for their contributions to the release: + +- [Adam Talbot](https://github.com/adamrtalbot) +- [Edmund Miller](https://github.com/Emiller88) +- [Esha Joshi](https://github.com/ejseqera) +- [Harshil Patel](https://github.com/drpatelh) +- [Lukas Forer](https://github.com/lukfor) +- [James Fellows Yates](https://github.com/jfy133) +- [Maxime Garcia](https://github.com/maxulysse) +- [Rob Syme](https://github.com/robsyme) +- [Sateesh Peri](https://github.com/sateeshperi) +- [Sebastian Schönherr](https://github.com/seppinho) + +Thank you to everyone else that has contributed by reporting bugs, enhancements or in any other way, shape or form. + +### Enhancements & fixes + +- [PR #188](https://github.com/nf-core/fetchngs/pull/188) - Use nf-test for all pipeline testing + +### Enhancements & fixes + ## [[1.10.1](https://github.com/nf-core/fetchngs/releases/tag/1.10.1)] - 2023-10-08 ### Credits @@ -10,8 +35,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 Special thanks to the following for their contributions to the release: - [Adam Talbot](https://github.com/adamrtalbot) +- [Davide Carlson](https://github.com/davidecarlson) - [Harshil Patel](https://github.com/drpatelh) - [Maxime Garcia](https://github.com/maxulysse) +- [MCMandR](https://github.com/MCMandR) - [Rob Syme](https://github.com/robsyme) Thank you to everyone else that has contributed by reporting bugs, enhancements or in any other way, shape or form. diff --git a/CITATIONS.md b/CITATIONS.md index 570e7e5b..482d5600 100644 --- a/CITATIONS.md +++ b/CITATIONS.md @@ -37,7 +37,7 @@ - [Synapse](https://pubmed.ncbi.nlm.nih.gov/24071850/) > Omberg L, Ellrott K, Yuan Y, Kandoth C, Wong C, Kellen MR, Friend SH, Stuart J, Liang H, Margolin AA. Enabling transparent and collaborative computational analysis of 12 tumor types within The Cancer Genome Atlas. 
Nat Genet. 2013 Oct;45(10):1121-6. doi: 10.1038/ng.2761. PMID: 24071850; PMCID: PMC3950337. -## Software packaging/containerisation tools +## Software packaging/containerisation/testing tools - [Anaconda](https://anaconda.com) @@ -55,6 +55,8 @@ > Merkel, D. (2014). Docker: lightweight linux containers for consistent development and deployment. Linux Journal, 2014(239), 2. doi: 10.5555/2600239.2600241. +- [nf-test](https://code.askimed.com/nf-test) + - [Singularity](https://pubmed.ncbi.nlm.nih.gov/28494014/) > Kurtzer GM, Sochat V, Bauer MW. Singularity: Scientific containers for mobility of compute. PLoS One. 2017 May 11;12(5):e0177459. doi: 10.1371/journal.pone.0177459. eCollection 2017. PubMed PMID: 28494014; PubMed Central PMCID: PMC5426675. diff --git a/README.md b/README.md index de90338a..02be6056 100644 --- a/README.md +++ b/README.md @@ -3,6 +3,7 @@ [![GitHub Actions CI Status](https://github.com/nf-core/fetchngs/workflows/nf-core%20CI/badge.svg)](https://github.com/nf-core/fetchngs/actions?query=workflow%3A%22nf-core+CI%22) [![GitHub Actions Linting Status](https://github.com/nf-core/fetchngs/workflows/nf-core%20linting/badge.svg)](https://github.com/nf-core/fetchngs/actions?query=workflow%3A%22nf-core+linting%22) [![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/fetchngs/results) +[![nf-test](https://img.shields.io/badge/tested_with-nf--test-337ab7.svg)](https://github.com/askimed/nf-test) [![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.5070524-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.5070524) [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.04.0-23aa62.svg)](https://www.nextflow.io/) diff --git a/assets/multiqc_config.yml b/assets/multiqc_config.yml deleted file mode 100644 index bafbf577..00000000 --- a/assets/multiqc_config.yml +++ /dev/null @@ -1,13 +0,0 @@ -report_comment: > - This report has been generated by the 
nf-core/fetchngs - analysis pipeline. For information about how to interpret these results, please see the - documentation. -report_section_order: - "nf-core-fetchngs-methods-description": - order: -1000 - software_versions: - order: -1001 - "nf-core-fetchngs-summary": - order: -1002 - -export_plots: true diff --git a/conf/base.config b/conf/base.config index 4767e522..6af79a7b 100644 --- a/conf/base.config +++ b/conf/base.config @@ -14,6 +14,12 @@ process { memory = { check_max( 6.GB * task.attempt, 'memory' ) } time = { check_max( 4.h * task.attempt, 'time' ) } + publishDir = [ + path: { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }, + mode: params.publish_dir_mode, + saveAs: { filename -> filename.equals('versions.yml') ? null : filename } + ] + errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' } maxRetries = 1 maxErrors = '-1' @@ -57,7 +63,4 @@ process { errorStrategy = 'retry' maxRetries = 2 } - withName:CUSTOM_DUMPSOFTWAREVERSIONS { - cache = false - } } diff --git a/conf/modules.config b/conf/modules.config deleted file mode 100644 index f5cb1c77..00000000 --- a/conf/modules.config +++ /dev/null @@ -1,171 +0,0 @@ -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - Config file for defining DSL2 per module options and publishing paths -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - Available keys to override module options: - ext.args = Additional arguments appended to command in module. - ext.args2 = Second set of arguments appended to command in module (multi-tool modules). - ext.args3 = Third set of arguments appended to command in module (multi-tool modules). - ext.prefix = File name prefix for output files. 
----------------------------------------------------------------------------------------- -*/ - -// -// Generic process options for all workflows -// -process { - - publishDir = [ - path: { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }, - mode: params.publish_dir_mode, - saveAs: { filename -> filename.equals('versions.yml') ? null : filename } - ] - - withName: CUSTOM_DUMPSOFTWAREVERSIONS { - publishDir = [ - path: { "${params.outdir}/pipeline_info" }, - mode: params.publish_dir_mode, - pattern: '*_versions.yml' - ] - } - -} - -// -// Process options for the SRA workflow -// -if (params.input_type == 'sra') { - - process { - - withName: SRA_IDS_TO_RUNINFO { - publishDir = [ - path: { "${params.outdir}/metadata" }, - enabled: false - ] - } - - withName: SRA_RUNINFO_TO_FTP { - publishDir = [ - path: { "${params.outdir}/metadata" }, - mode: params.publish_dir_mode, - saveAs: { filename -> filename.equals('versions.yml') ? null : filename } - ] - } - - withName: SRA_FASTQ_FTP { - ext.args = '--retry 5 --continue-at - --max-time 1200' - publishDir = [ - [ - path: { "${params.outdir}/fastq" }, - mode: params.publish_dir_mode, - pattern: "*.fastq.gz" - ], - [ - path: { "${params.outdir}/fastq/md5" }, - mode: params.publish_dir_mode, - pattern: "*.md5" - ] - ] - } - - withName: SRATOOLS_PREFETCH { - publishDir = [ - path: { "${params.outdir}/sra" }, - enabled: false - ] - } - - withName: SRATOOLS_FASTERQDUMP { - ext.args = '--split-files --include-technical' - publishDir = [ - path: { "${params.outdir}/fastq" }, - mode: params.publish_dir_mode, - pattern: "*.fastq.gz" - ] - } - - withName: SRA_TO_SAMPLESHEET { - publishDir = [ - path: { "${params.outdir}/samplesheet" }, - enabled: false - ] - } - - withName: SRA_MERGE_SAMPLESHEET { - publishDir = [ - path: { "${params.outdir}/samplesheet" }, - mode: params.publish_dir_mode, - saveAs: { filename -> filename.equals('versions.yml') ? 
null : filename } - ] - } - - withName: MULTIQC_MAPPINGS_CONFIG { - publishDir = [ - path: { "${params.outdir}/samplesheet" }, - mode: params.publish_dir_mode, - saveAs: { filename -> filename.equals('versions.yml') ? null : filename } - ] - } - - } - -} - -// -// Process options for the Synapse workflow -// -if (params.input_type == 'synapse') { - - process { - - withName: SYNAPSE_LIST { - ext.args = '--long' - publishDir = [ - path: { "${params.outdir}/metadata" }, - mode: params.publish_dir_mode, - saveAs: { filename -> filename.equals('versions.yml') ? null : filename } - ] - } - - withName: SYNAPSE_GET { - publishDir = [ - [ - path: { "${params.outdir}/fastq" }, - mode: params.publish_dir_mode, - pattern: "*.fastq.gz" - ], - [ - path: { "${params.outdir}/fastq/md5" }, - mode: params.publish_dir_mode, - pattern: "*.md5" - ] - ] - } - - withName: SYNAPSE_SHOW { - publishDir = [ - path: { "${params.outdir}/metadata" }, - mode: params.publish_dir_mode, - saveAs: { filename -> filename.equals('versions.yml') ? null : filename } - ] - } - - withName: SYNAPSE_TO_SAMPLESHEET { - publishDir = [ - path: { "${params.outdir}/samplesheet" }, - enabled: false - ] - } - - withName: SYNAPSE_MERGE_SAMPLESHEET { - publishDir = [ - path: { "${params.outdir}/samplesheet" }, - mode: params.publish_dir_mode, - saveAs: { filename -> filename.equals('versions.yml') ? 
null : filename } - ] - } - - } - -} diff --git a/conf/test.config b/conf/test.config index 3c58e9cf..c09a571c 100644 --- a/conf/test.config +++ b/conf/test.config @@ -20,5 +20,7 @@ params { max_time = '6.h' // Input data - input = 'https://raw.githubusercontent.com/nf-core/test-datasets/fetchngs/sra_ids_test.csv' + input = "${projectDir}/tests/sra_ids_test.csv" + + validationSchemaIgnoreParams = 'test_data_base,merge_samplesheet_ids,fastq_ftp_ids,test_data' } diff --git a/conf/test_data.config b/conf/test_data.config new file mode 100644 index 00000000..670115a8 --- /dev/null +++ b/conf/test_data.config @@ -0,0 +1,47 @@ +params { + // Base directory for test data + test_data_base = "https://raw.githubusercontent.com/nf-core/test-datasets/fetchngs" + + merge_samplesheet_ids = [ "DRX024467_DRR026872", "SRX11047067_SRR14709033", "SRX9504942_SRR13055517", "DRX026011_DRR028935", "SRX17709227_SRR21711856", "SRX9504942_SRR13055518", "ERX1188904_ERR1109373", "SRX17709228_SRR21711855", "SRX9504942_SRR13055519", "ERX1234253_ERR1160846", "SRX6725035_SRR9984183", "SRX9504942_SRR13055520", "SRX10940790_SRR14593545", "SRX9315476_SRR12848126", "SRX9626017_SRR13191702" ] + + def merge_samplesheet_url = "${params.test_data_base}/modules/local/sra_merge_samplesheet/samplesheets/" + def merge_mappings_url = "${params.test_data_base}/modules/local/sra_merge_samplesheet/mappings/" + def merge_samplesheet_urls = [] + def merge_mappings_urls = [] + + merge_samplesheet_ids.each { id -> + merge_samplesheet_urls += "${merge_samplesheet_url}${id}.samplesheet.csv" + merge_mappings_urls += "${merge_mappings_url}${id}.mappings.csv" + } + + fastq_ftp_ids = ["SRR13191702"] + def fastq_ftp_url = "ftp.sra.ebi.ac.uk/vol1/fastq/SRR131/002/SRR13191702/" + def fastq_ftp_urls = [] + + fastq_ftp_ids.each { id -> + fastq_ftp_urls += "${fastq_ftp_url}${id}_1.fastq.gz" + fastq_ftp_urls += "${fastq_ftp_url}${id}_2.fastq.gz" + } + + test_data { + 'sarscov2'{ + 'illumina' { + SRR11140744_tar_gz = 
"${params.test_data_base}/data/genomics/sarscov2/illumina/sra/SRR11140744.tar.gz" + SRR13255544_tar_gz = "${params.test_data_base}/data/genomics/sarscov2/illumina/sra/SRR13255544.tar.gz" + } + } + 'generic' { + 'config' { + ncbi_user_settings = "${params.test_data_base}/data/generic/config/ncbi_user_settings.mkfg" + } + } + 'modules_local' { + multiqc_mappings_config = "${params.test_data_base}/modules/local/multiqc_mappings_config/SRX9626017_SRR13191702.mappings.csv" + sra_merge_samplesheet_samplesheets = merge_samplesheet_urls + sra_merge_samplesheet_mappings = merge_mappings_urls + sra_to_samplesheet = "${params.test_data_base}/modules/local/sra_to_samplesheet/SRX9626017_SRR13191702.mappings.csv" + sra_fastq_ftp = fastq_ftp_urls + sra_runinfo_to_ftp = "${params.test_data_base}/modules/local/sra_runinfo_to_ftp/SRR13191702.runinfo.tsv" + } + } +} diff --git a/lib/NfcoreTemplate.groovy b/lib/NfcoreTemplate.groovy deleted file mode 100755 index ed58a7ae..00000000 --- a/lib/NfcoreTemplate.groovy +++ /dev/null @@ -1,330 +0,0 @@ -// -// This file holds several functions used within the nf-core pipeline template. -// - -import org.yaml.snakeyaml.Yaml -import groovy.json.JsonOutput - -class NfcoreTemplate { - - // - // Check AWS Batch related parameters have been specified correctly - // - public static void awsBatch(workflow, params) { - if (workflow.profile.contains('awsbatch')) { - // Check params.awsqueue and params.awsregion have been set if running on AWSBatch - assert (params.awsqueue && params.awsregion) : "Specify correct --awsqueue and --awsregion parameters on AWSBatch!" - // Check outdir paths to be S3 buckets if running on AWSBatch - assert params.outdir.startsWith('s3:') : "Outdir not on S3 - specify S3 Bucket to run on AWSBatch!" 
- } - } - - // - // Warn if a -profile or Nextflow config has not been provided to run the pipeline - // - public static void checkConfigProvided(workflow, log) { - if (workflow.profile == 'standard' && workflow.configFiles.size() <= 1) { - log.warn "[$workflow.manifest.name] You are attempting to run the pipeline without any custom configuration!\n\n" + - "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" + - " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" + - " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" + - " (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" + - "Please refer to the quick start section and usage docs for the pipeline.\n " - } - } - - // - // Generate version string - // - public static String version(workflow) { - String version_string = "" - - if (workflow.manifest.version) { - def prefix_v = workflow.manifest.version[0] != 'v' ? 
'v' : '' - version_string += "${prefix_v}${workflow.manifest.version}" - } - - if (workflow.commitId) { - def git_shortsha = workflow.commitId.substring(0, 7) - version_string += "-g${git_shortsha}" - } - - return version_string - } - - // - // Construct and send completion email - // - public static void email(workflow, params, summary_params, projectDir, log) { - - // Set up the e-mail variables - def subject = "[$workflow.manifest.name] Successful: $workflow.runName" - if (!workflow.success) { - subject = "[$workflow.manifest.name] FAILED: $workflow.runName" - } - - def summary = [:] - for (group in summary_params.keySet()) { - summary << summary_params[group] - } - - def misc_fields = [:] - misc_fields['Date Started'] = workflow.start - misc_fields['Date Completed'] = workflow.complete - misc_fields['Pipeline script file path'] = workflow.scriptFile - misc_fields['Pipeline script hash ID'] = workflow.scriptId - if (workflow.repository) misc_fields['Pipeline repository Git URL'] = workflow.repository - if (workflow.commitId) misc_fields['Pipeline repository Git Commit'] = workflow.commitId - if (workflow.revision) misc_fields['Pipeline Git branch/tag'] = workflow.revision - misc_fields['Nextflow Version'] = workflow.nextflow.version - misc_fields['Nextflow Build'] = workflow.nextflow.build - misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp - - def email_fields = [:] - email_fields['version'] = NfcoreTemplate.version(workflow) - email_fields['runName'] = workflow.runName - email_fields['success'] = workflow.success - email_fields['dateComplete'] = workflow.complete - email_fields['duration'] = workflow.duration - email_fields['exitStatus'] = workflow.exitStatus - email_fields['errorMessage'] = (workflow.errorMessage ?: 'None') - email_fields['errorReport'] = (workflow.errorReport ?: 'None') - email_fields['commandLine'] = workflow.commandLine - email_fields['projectDir'] = workflow.projectDir - email_fields['summary'] = summary << 
misc_fields - - // Check if we are only sending emails on failure - def email_address = params.email - if (!params.email && params.email_on_fail && !workflow.success) { - email_address = params.email_on_fail - } - - // Render the TXT template - def engine = new groovy.text.GStringTemplateEngine() - def tf = new File("$projectDir/assets/email_template.txt") - def txt_template = engine.createTemplate(tf).make(email_fields) - def email_txt = txt_template.toString() - - // Render the HTML template - def hf = new File("$projectDir/assets/email_template.html") - def html_template = engine.createTemplate(hf).make(email_fields) - def email_html = html_template.toString() - - // Render the sendmail template - def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "$projectDir" ] - def sf = new File("$projectDir/assets/sendmail_template.txt") - def sendmail_template = engine.createTemplate(sf).make(smail_fields) - def sendmail_html = sendmail_template.toString() - - // Send the HTML e-mail - Map colors = logColours(params.monochrome_logs) - if (email_address) { - try { - if (params.plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') } - // Try to send HTML e-mail using sendmail - [ 'sendmail', '-t' ].execute() << sendmail_html - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (sendmail)-" - } catch (all) { - // Catch failures and try with plaintext - def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ] - mail_cmd.execute() << email_html - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (mail)-" - } - } - - // Write summary e-mail HTML to a file - def output_d = new File("${params.outdir}/pipeline_info/") - if (!output_d.exists()) { - output_d.mkdirs() - } - def output_hf = new File(output_d, "pipeline_report.html") - output_hf.withWriter { w -> 
w << email_html } - def output_tf = new File(output_d, "pipeline_report.txt") - output_tf.withWriter { w -> w << email_txt } - } - - // - // Construct and send a notification to a web server as JSON - // e.g. Microsoft Teams and Slack - // - public static void IM_notification(workflow, params, summary_params, projectDir, log) { - def hook_url = params.hook_url - - def summary = [:] - for (group in summary_params.keySet()) { - summary << summary_params[group] - } - - def misc_fields = [:] - misc_fields['start'] = workflow.start - misc_fields['complete'] = workflow.complete - misc_fields['scriptfile'] = workflow.scriptFile - misc_fields['scriptid'] = workflow.scriptId - if (workflow.repository) misc_fields['repository'] = workflow.repository - if (workflow.commitId) misc_fields['commitid'] = workflow.commitId - if (workflow.revision) misc_fields['revision'] = workflow.revision - misc_fields['nxf_version'] = workflow.nextflow.version - misc_fields['nxf_build'] = workflow.nextflow.build - misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp - - def msg_fields = [:] - msg_fields['version'] = NfcoreTemplate.version(workflow) - msg_fields['runName'] = workflow.runName - msg_fields['success'] = workflow.success - msg_fields['dateComplete'] = workflow.complete - msg_fields['duration'] = workflow.duration - msg_fields['exitStatus'] = workflow.exitStatus - msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None') - msg_fields['errorReport'] = (workflow.errorReport ?: 'None') - msg_fields['commandLine'] = workflow.commandLine.replaceFirst(/ +--hook_url +[^ ]+/, "") - msg_fields['projectDir'] = workflow.projectDir - msg_fields['summary'] = summary << misc_fields - - // Render the JSON template - def engine = new groovy.text.GStringTemplateEngine() - // Different JSON depending on the service provider - // Defaults to "Adaptive Cards" (https://adaptivecards.io), except Slack which has its own format - def json_path = hook_url.contains("hooks.slack.com") ? 
"slackreport.json" : "adaptivecard.json" - def hf = new File("$projectDir/assets/${json_path}") - def json_template = engine.createTemplate(hf).make(msg_fields) - def json_message = json_template.toString() - - // POST - def post = new URL(hook_url).openConnection(); - post.setRequestMethod("POST") - post.setDoOutput(true) - post.setRequestProperty("Content-Type", "application/json") - post.getOutputStream().write(json_message.getBytes("UTF-8")); - def postRC = post.getResponseCode(); - if (! postRC.equals(200)) { - log.warn(post.getErrorStream().getText()); - } - } - - // - // Dump pipeline parameters in a json file - // - public static void dump_parameters(workflow, params) { - def output_d = new File("${params.outdir}/pipeline_info/") - if (!output_d.exists()) { - output_d.mkdirs() - } - - def timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') - def output_pf = new File(output_d, "params_${timestamp}.json") - def jsonStr = JsonOutput.toJson(params) - output_pf.text = JsonOutput.prettyPrint(jsonStr) - } - - // - // Print pipeline summary on completion - // - public static void summary(workflow, params, log) { - Map colors = logColours(params.monochrome_logs) - if (workflow.success) { - if (workflow.stats.ignoredCount == 0) { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-" - } else { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" - } - } else { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-" - } - } - - // - // ANSII Colours used for terminal logging - // - public static Map logColours(Boolean monochrome_logs) { - Map colorcodes = [:] - - // Reset / Meta - colorcodes['reset'] = monochrome_logs ? '' : "\033[0m" - colorcodes['bold'] = monochrome_logs ? '' : "\033[1m" - colorcodes['dim'] = monochrome_logs ? 
'' : "\033[2m" - colorcodes['underlined'] = monochrome_logs ? '' : "\033[4m" - colorcodes['blink'] = monochrome_logs ? '' : "\033[5m" - colorcodes['reverse'] = monochrome_logs ? '' : "\033[7m" - colorcodes['hidden'] = monochrome_logs ? '' : "\033[8m" - - // Regular Colors - colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m" - colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m" - colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m" - colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m" - colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m" - colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m" - colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m" - colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m" - - // Bold - colorcodes['bblack'] = monochrome_logs ? '' : "\033[1;30m" - colorcodes['bred'] = monochrome_logs ? '' : "\033[1;31m" - colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m" - colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m" - colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m" - colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m" - colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m" - colorcodes['bwhite'] = monochrome_logs ? '' : "\033[1;37m" - - // Underline - colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m" - colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m" - colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m" - colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m" - colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m" - colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m" - colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m" - colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m" - - // High Intensity - colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m" - colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m" - colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m" - colorcodes['iyellow'] = monochrome_logs ? 
'' : "\033[0;93m" - colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m" - colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m" - colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m" - colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m" - - // Bold High Intensity - colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m" - colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m" - colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m" - colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m" - colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m" - colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m" - colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m" - colorcodes['biwhite'] = monochrome_logs ? '' : "\033[1;97m" - - return colorcodes - } - - // - // Does what is says on the tin - // - public static String dashedLine(monochrome_logs) { - Map colors = logColours(monochrome_logs) - return "-${colors.dim}----------------------------------------------------${colors.reset}-" - } - - // - // nf-core logo - // - public static String logo(workflow, monochrome_logs) { - Map colors = logColours(monochrome_logs) - String workflow_version = NfcoreTemplate.version(workflow) - String.format( - """\n - ${dashedLine(monochrome_logs)} - ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset} - ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset} - ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} - ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} - ${colors.green}`._,._,\'${colors.reset} - ${colors.purple} ${workflow.manifest.name} ${workflow_version}${colors.reset} - ${dashedLine(monochrome_logs)} - """.stripIndent() - ) - } -} diff --git a/lib/Utils.groovy b/lib/Utils.groovy deleted file mode 100644 index 8d030f4e..00000000 --- a/lib/Utils.groovy +++ /dev/null @@ -1,47 +0,0 @@ -// -// This file holds several Groovy 
functions that could be useful for any Nextflow pipeline -// - -import org.yaml.snakeyaml.Yaml - -class Utils { - - // - // When running with -profile conda, warn if channels have not been set-up appropriately - // - public static void checkCondaChannels(log) { - Yaml parser = new Yaml() - def channels = [] - try { - def config = parser.load("conda config --show channels".execute().text) - channels = config.channels - } catch(NullPointerException | IOException e) { - log.warn "Could not verify conda channel configuration." - return - } - - // Check that all channels are present - // This channel list is ordered by required channel priority. - def required_channels_in_order = ['conda-forge', 'bioconda', 'defaults'] - def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean - - // Check that they are in the right order - def channel_priority_violation = false - def n = required_channels_in_order.size() - for (int i = 0; i < n - 1; i++) { - channel_priority_violation |= !(channels.indexOf(required_channels_in_order[i]) < channels.indexOf(required_channels_in_order[i+1])) - } - - if (channels_missing | channel_priority_violation) { - log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + - " There is a problem with your Conda configuration!\n\n" + - " You will need to set-up the conda-forge and bioconda channels correctly.\n" + - " Please refer to https://bioconda.github.io/\n" + - " The observed channel order is \n" + - " ${channels}\n" + - " but the following channel order is required:\n" + - " ${required_channels_in_order}\n" + - "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" - } - } -} diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy deleted file mode 100755 index f9469941..00000000 --- a/lib/WorkflowMain.groovy +++ /dev/null @@ -1,105 +0,0 @@ -// -// This file holds several functions specific to the main.nf workflow in the nf-core/fetchngs 
pipeline -// - -import nextflow.Nextflow - -class WorkflowMain { - - // - // Citation string for pipeline - // - public static String citation(workflow) { - return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + - "* The pipeline\n" + - " https://doi.org/10.5281/zenodo.5070524\n\n" + - "* The nf-core framework\n" + - " https://doi.org/10.1038/s41587-020-0439-x\n\n" + - "* Software dependencies\n" + - " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md" - } - - - // - // Validate parameters and print summary to screen - // - public static void initialise(workflow, params, log) { - - // Print workflow version and exit on --version - if (params.version) { - String workflow_version = NfcoreTemplate.version(workflow) - log.info "${workflow.manifest.name} ${workflow_version}" - System.exit(0) - } - - // Check that a -profile or Nextflow config has been provided to run the pipeline - NfcoreTemplate.checkConfigProvided(workflow, log) - - // Check that conda channels are set-up correctly - if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { - Utils.checkCondaChannels(log) - } - - // Check AWS batch settings - NfcoreTemplate.awsBatch(workflow, params) - - // Check input has been provided - if (!params.input) { - Nextflow.error("Please provide an input file containing ids to the pipeline - one per line e.g. '--input ids.csv'") - } - - // Check valid input_type has been provided - def input_types = ['sra', 'synapse'] - if (!input_types.contains(params.input_type)) { - Nextflow.error("Invalid option: '${params.input_type}'. 
Valid options for '--input_type': ${input_types.join(', ')}.") - } - } - - // Check if input ids are from the SRA - public static Boolean isSraId(input) { - def is_sra = false - def total_ids = 0 - def no_match_ids = [] - def pattern = /^(((SR|ER|DR)[APRSX])|(SAM(N|EA|D))|(PRJ(NA|EB|DB))|(GS[EM]))(\d+)$/ - input.eachLine { line -> - total_ids += 1 - if (!(line =~ pattern)) { - no_match_ids << line - } - } - - def num_match = total_ids - no_match_ids.size() - if (num_match > 0) { - if (num_match == total_ids) { - is_sra = true - } else { - Nextflow.error("Mixture of ids provided via --input: ${no_match_ids.join(', ')}\nPlease provide either SRA / ENA / GEO / DDBJ or Synapse ids!") - } - } - return is_sra - } - - // Check if input ids are from the Synapse platform - public static Boolean isSynapseId(input) { - def is_synapse = false - def total_ids = 0 - def no_match_ids = [] - def pattern = /^syn\d{8}$/ - input.eachLine { line -> - total_ids += 1 - if (!(line =~ pattern)) { - no_match_ids << line - } - } - - def num_match = total_ids - no_match_ids.size() - if (num_match > 0) { - if (num_match == total_ids) { - is_synapse = true - } else { - Nextflow.error("Mixture of ids provided via --input: ${no_match_ids.join(', ')}\nPlease provide either SRA / ENA / GEO / DDBJ or Synapse ids!") - } - } - return is_synapse - } -} diff --git a/lib/WorkflowSra.groovy b/lib/WorkflowSra.groovy deleted file mode 100755 index 8b17a2c1..00000000 --- a/lib/WorkflowSra.groovy +++ /dev/null @@ -1,34 +0,0 @@ -// -// This file holds several functions specific to the workflow/sra.nf in the nf-core/fetchngs pipeline -// - -import nextflow.Nextflow - -class WorkflowSra { - - // - // Check and validate parameters - // - public static void initialise(params) { - // Check minimal ENA fields are provided to download FastQ files - def valid_ena_metadata_fields = ['run_accession', 'experiment_accession', 'library_layout', 'fastq_ftp', 'fastq_md5'] - def ena_metadata_fields = 
params.ena_metadata_fields ? params.ena_metadata_fields.split(',').collect{ it.trim().toLowerCase() } : valid_ena_metadata_fields - if (!ena_metadata_fields.containsAll(valid_ena_metadata_fields)) { - Nextflow.error("Invalid option: '${params.ena_metadata_fields}'. Minimally required fields for '--ena_metadata_fields': '${valid_ena_metadata_fields.join(',')}'") - } - } - - // - // Print a warning after pipeline has completed - // - public static void curateSamplesheetWarn(log) { - log.warn "=============================================================================\n" + - " Please double-check the samplesheet that has been auto-created by the pipeline.\n\n" + - " Public databases don't reliably hold information such as strandedness\n" + - " information, controls etc\n\n" + - " All of the sample metadata obtained from the ENA has been appended\n" + - " as additional columns to help you manually curate the samplesheet before\n" + - " running nf-core/other pipelines.\n" + - "===================================================================================" - } -} diff --git a/lib/WorkflowSynapse.groovy b/lib/WorkflowSynapse.groovy deleted file mode 100755 index 8a79c6a6..00000000 --- a/lib/WorkflowSynapse.groovy +++ /dev/null @@ -1,92 +0,0 @@ -// -// This file holds several functions specific to the workflow/synapse.nf in the nf-core/fetchngs pipeline -// - -class WorkflowSynapse { - - // - // Convert metadata obtained from the 'synapse show' command to a Groovy map - // - public static Map synapseShowToMap(synapse_file) { - def meta = [:] - def category = '' - synapse_file.eachLine { line -> - def entries = [null, null] - if (!line.startsWith(' ') && !line.trim().isEmpty()) { - category = line.tokenize(':')[0] - } else { - entries = line.trim().tokenize('=') - } - meta["${category}|${entries[0]}"] = entries[1] - } - meta.id = meta['properties|id'] - meta.name = meta['properties|name'] - meta.md5 = meta['File|md5'] - return meta.findAll{ it.value != null } - } - - 
// - // Print a warning after pipeline has completed - // - public static void curateSamplesheetWarn(log) { - log.warn "=============================================================================\n" + - " Please double-check the samplesheet that has been auto-created by the pipeline.\n\n" + - " Where applicable, default values will be used for sample-specific metadata\n" + - " such as strandedness, controls etc as this information is not provided\n" + - " in a standardised manner when uploading data to Synapse.\n" + - "===================================================================================" - } - - // - // Obtain Sample ID from File Name - // - public static String sampleNameFromFastQ(input_file, pattern) { - - def sampleids = "" - - def filePattern = pattern.toString() - int p = filePattern.lastIndexOf('/') - if( p != -1 ) - filePattern = filePattern.substring(p+1) - - input_file.each { - String fileName = input_file.getFileName().toString() - - String indexOfWildcards = filePattern.findIndexOf { it=='*' || it=='?' } - String indexOfBrackets = filePattern.findIndexOf { it=='{' || it=='[' } - if( indexOfWildcards==-1 && indexOfBrackets==-1 ) { - if( fileName == filePattern ) - return actual.getSimpleName() - throw new IllegalArgumentException("Not a valid file pair globbing pattern: pattern=$filePattern file=$fileName") - } - - int groupCount = 0 - for( int i=0; i filename.equals('versions.yml') ? 
null : filename } + ] + } +} \ No newline at end of file diff --git a/modules/local/multiqc_mappings_config/tests/main.nf.test b/modules/local/multiqc_mappings_config/tests/main.nf.test new file mode 100644 index 00000000..75356197 --- /dev/null +++ b/modules/local/multiqc_mappings_config/tests/main.nf.test @@ -0,0 +1,31 @@ +nextflow_process { + + name "Test process: MULTIQC_MAPPINGS_CONFIG" + script "../main.nf" + process "MULTIQC_MAPPINGS_CONFIG" + tag "modules" + tag "modules_local" + tag "multiqc_mappings_config" + + test("Should run without failures") { + + when { + params { + outdir = "$outputDir" + } + + process { + """ + input[0] = file(params.test_data['modules_local']['multiqc_mappings_config'], checkIfExists: true) + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } +} diff --git a/modules/local/multiqc_mappings_config/tests/main.nf.test.snap b/modules/local/multiqc_mappings_config/tests/main.nf.test.snap new file mode 100644 index 00000000..1a4d16c4 --- /dev/null +++ b/modules/local/multiqc_mappings_config/tests/main.nf.test.snap @@ -0,0 +1,27 @@ +{ + "Should run without failures": { + "content": [ + { + "0": [ + [ + "multiqc_config.yml:md5,7f3cb10fff83ba9eb3e8fa6862d1290a", + "versions.yml:md5,dd4c66f0551d15510b36bb2e2b2fdd73" + ] + ], + "1": [ + "versions.yml:md5,dd4c66f0551d15510b36bb2e2b2fdd73" + ], + "versions": [ + "versions.yml:md5,dd4c66f0551d15510b36bb2e2b2fdd73" + ], + "yml": [ + [ + "multiqc_config.yml:md5,7f3cb10fff83ba9eb3e8fa6862d1290a", + "versions.yml:md5,dd4c66f0551d15510b36bb2e2b2fdd73" + ] + ] + } + ], + "timestamp": "2023-09-22T10:58:18.132284" + } +} \ No newline at end of file diff --git a/modules/local/multiqc_mappings_config/tests/tags.yml b/modules/local/multiqc_mappings_config/tests/tags.yml new file mode 100644 index 00000000..595e2e31 --- /dev/null +++ b/modules/local/multiqc_mappings_config/tests/tags.yml @@ -0,0 +1,2 @@ +multiqc_mappings_config: + - 
modules/local/multiqc_mappings_config/** diff --git a/modules/local/sra_fastq_ftp/nextflow.config b/modules/local/sra_fastq_ftp/nextflow.config new file mode 100644 index 00000000..7b5ebf70 --- /dev/null +++ b/modules/local/sra_fastq_ftp/nextflow.config @@ -0,0 +1,17 @@ +process { + withName: 'SRA_FASTQ_FTP' { + ext.args = '--retry 5 --continue-at - --max-time 1200' + publishDir = [ + [ + path: { "${params.outdir}/fastq" }, + mode: params.publish_dir_mode, + pattern: "*.fastq.gz" + ], + [ + path: { "${params.outdir}/fastq/md5" }, + mode: params.publish_dir_mode, + pattern: "*.md5" + ] + ] + } +} diff --git a/modules/local/sra_fastq_ftp/tests/main.nf.test b/modules/local/sra_fastq_ftp/tests/main.nf.test new file mode 100644 index 00000000..ab846e5d --- /dev/null +++ b/modules/local/sra_fastq_ftp/tests/main.nf.test @@ -0,0 +1,34 @@ +nextflow_process { + + name "Test process: SRA_FASTQ_FTP" + script "../main.nf" + process "SRA_FASTQ_FTP" + tag "modules" + tag "modules_local" + tag "sra_fastq_ftp" + + test("Should run without failures") { + + when { + params { + outdir = "$outputDir" + } + + process { + """ + input[0] = [ + [ id:'SRX9626017_SRR13191702', single_end:false, md5_1: '89c5be920021a035084d8aeb74f32df7', md5_2: '56271be38a80db78ef3bdfc5d9909b98' ], // meta map + params.test_data['modules_local']['sra_fastq_ftp'] + ] + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } +} diff --git a/modules/local/sra_fastq_ftp/tests/main.nf.test.snap b/modules/local/sra_fastq_ftp/tests/main.nf.test.snap new file mode 100644 index 00000000..fb815455 --- /dev/null +++ b/modules/local/sra_fastq_ftp/tests/main.nf.test.snap @@ -0,0 +1,71 @@ +{ + "Should run without failures": { + "content": [ + { + "0": [ + [ + { + "id": "SRX9626017_SRR13191702", + "single_end": false, + "md5_1": "89c5be920021a035084d8aeb74f32df7", + "md5_2": "56271be38a80db78ef3bdfc5d9909b98" + }, + [ + 
"SRX9626017_SRR13191702_1.fastq.gz:md5,89c5be920021a035084d8aeb74f32df7", + "SRX9626017_SRR13191702_2.fastq.gz:md5,56271be38a80db78ef3bdfc5d9909b98" + ] + ] + ], + "1": [ + [ + { + "id": "SRX9626017_SRR13191702", + "single_end": false, + "md5_1": "89c5be920021a035084d8aeb74f32df7", + "md5_2": "56271be38a80db78ef3bdfc5d9909b98" + }, + [ + "SRX9626017_SRR13191702_1.fastq.gz.md5:md5,055a6916ec9ee478e453d50651f87997", + "SRX9626017_SRR13191702_2.fastq.gz.md5:md5,c30ac785f8d80ec563fabf604d8bf945" + ] + ] + ], + "2": [ + "versions.yml:md5,6b9d69dea1c1305f74a65197ee871f1b" + ], + "fastq": [ + [ + { + "id": "SRX9626017_SRR13191702", + "single_end": false, + "md5_1": "89c5be920021a035084d8aeb74f32df7", + "md5_2": "56271be38a80db78ef3bdfc5d9909b98" + }, + [ + "SRX9626017_SRR13191702_1.fastq.gz:md5,89c5be920021a035084d8aeb74f32df7", + "SRX9626017_SRR13191702_2.fastq.gz:md5,56271be38a80db78ef3bdfc5d9909b98" + ] + ] + ], + "md5": [ + [ + { + "id": "SRX9626017_SRR13191702", + "single_end": false, + "md5_1": "89c5be920021a035084d8aeb74f32df7", + "md5_2": "56271be38a80db78ef3bdfc5d9909b98" + }, + [ + "SRX9626017_SRR13191702_1.fastq.gz.md5:md5,055a6916ec9ee478e453d50651f87997", + "SRX9626017_SRR13191702_2.fastq.gz.md5:md5,c30ac785f8d80ec563fabf604d8bf945" + ] + ] + ], + "versions": [ + "versions.yml:md5,6b9d69dea1c1305f74a65197ee871f1b" + ] + } + ], + "timestamp": "2023-09-22T10:58:46.998421" + } +} \ No newline at end of file diff --git a/modules/local/sra_fastq_ftp/tests/tags.yml b/modules/local/sra_fastq_ftp/tests/tags.yml new file mode 100644 index 00000000..e7474bef --- /dev/null +++ b/modules/local/sra_fastq_ftp/tests/tags.yml @@ -0,0 +1,2 @@ +sra_fastq_ftp: + - modules/local/sra_fastq_ftp/** diff --git a/modules/local/sra_ids_to_runinfo/nextflow.config b/modules/local/sra_ids_to_runinfo/nextflow.config new file mode 100644 index 00000000..73da969a --- /dev/null +++ b/modules/local/sra_ids_to_runinfo/nextflow.config @@ -0,0 +1,8 @@ +process { + withName: 'SRA_IDS_TO_RUNINFO' 
{ + publishDir = [ + path: { "${params.outdir}/metadata" }, + enabled: false + ] + } +} \ No newline at end of file diff --git a/modules/local/sra_ids_to_runinfo/tests/main.nf.test b/modules/local/sra_ids_to_runinfo/tests/main.nf.test new file mode 100644 index 00000000..087f9069 --- /dev/null +++ b/modules/local/sra_ids_to_runinfo/tests/main.nf.test @@ -0,0 +1,32 @@ +nextflow_process { + + name "Test process: SRA_IDS_TO_RUNINFO" + script "../main.nf" + process "SRA_IDS_TO_RUNINFO" + tag "modules" + tag "modules_local" + tag "sra_ids_to_runinfo" + + test("Should run without failures") { + + when { + params { + outdir = "$outputDir" + } + + process { + """ + input[0] = 'SRR13191702' + input[1] = '' + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } +} diff --git a/modules/local/sra_ids_to_runinfo/tests/main.nf.test.snap b/modules/local/sra_ids_to_runinfo/tests/main.nf.test.snap new file mode 100644 index 00000000..241efe2a --- /dev/null +++ b/modules/local/sra_ids_to_runinfo/tests/main.nf.test.snap @@ -0,0 +1,21 @@ +{ + "Should run without failures": { + "content": [ + { + "0": [ + "SRR13191702.runinfo.tsv:md5,3a1be35781ca6e8a28d8fd4d2f3bbe85" + ], + "1": [ + "versions.yml:md5,1c14442e9b494b586eafe41e77300fae" + ], + "tsv": [ + "SRR13191702.runinfo.tsv:md5,3a1be35781ca6e8a28d8fd4d2f3bbe85" + ], + "versions": [ + "versions.yml:md5,1c14442e9b494b586eafe41e77300fae" + ] + } + ], + "timestamp": "2023-09-22T10:58:56.721948" + } +} \ No newline at end of file diff --git a/modules/local/sra_ids_to_runinfo/tests/tags.yml b/modules/local/sra_ids_to_runinfo/tests/tags.yml new file mode 100644 index 00000000..63ea7db3 --- /dev/null +++ b/modules/local/sra_ids_to_runinfo/tests/tags.yml @@ -0,0 +1,2 @@ +sra_ids_to_runinfo: + - modules/local/sra_ids_to_runinfo/** diff --git a/modules/local/sra_merge_samplesheet/nextflow.config b/modules/local/sra_merge_samplesheet/nextflow.config new file mode 100644 index 
00000000..f2926529 --- /dev/null +++ b/modules/local/sra_merge_samplesheet/nextflow.config @@ -0,0 +1,9 @@ +process { + withName: 'SRA_MERGE_SAMPLESHEET' { + publishDir = [ + path: { "${params.outdir}/samplesheet" }, + mode: params.publish_dir_mode, + saveAs: { filename -> filename.equals('versions.yml') ? null : filename } + ] + } +} \ No newline at end of file diff --git a/modules/local/sra_merge_samplesheet/tests/main.nf.test b/modules/local/sra_merge_samplesheet/tests/main.nf.test new file mode 100644 index 00000000..b7c98622 --- /dev/null +++ b/modules/local/sra_merge_samplesheet/tests/main.nf.test @@ -0,0 +1,32 @@ +nextflow_process { + + name "Test process: SRA_MERGE_SAMPLESHEET" + script "../main.nf" + process "SRA_MERGE_SAMPLESHEET" + tag "modules" + tag "modules_local" + tag "sra_merge_samplesheet" + + test("Should run without failures") { + + when { + params { + outdir = "$outputDir" + } + + process { + """ + input[0] = params.test_data['modules_local']['sra_merge_samplesheet_samplesheets'].collect { file(it, checkIfExists: true) } + input[1] = params.test_data['modules_local']['sra_merge_samplesheet_mappings'].collect { file(it, checkIfExists: true) } + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } +} diff --git a/modules/local/sra_merge_samplesheet/tests/main.nf.test.snap b/modules/local/sra_merge_samplesheet/tests/main.nf.test.snap new file mode 100644 index 00000000..203395ae --- /dev/null +++ b/modules/local/sra_merge_samplesheet/tests/main.nf.test.snap @@ -0,0 +1,27 @@ +{ + "Should run without failures": { + "content": [ + { + "0": [ + "samplesheet.csv:md5,ef756557b0735becd2574c2a3f5840b2" + ], + "1": [ + "id_mappings.csv:md5,07262807636ce4b50102308eabdcf253" + ], + "2": [ + "versions.yml:md5,410006679d5e496d8c55e58e78ca6b34" + ], + "mappings": [ + "id_mappings.csv:md5,07262807636ce4b50102308eabdcf253" + ], + "samplesheet": [ + 
"samplesheet.csv:md5,ef756557b0735becd2574c2a3f5840b2" + ], + "versions": [ + "versions.yml:md5,410006679d5e496d8c55e58e78ca6b34" + ] + } + ], + "timestamp": "2023-09-22T10:59:14.022116" + } +} \ No newline at end of file diff --git a/modules/local/sra_merge_samplesheet/tests/tags.yml b/modules/local/sra_merge_samplesheet/tests/tags.yml new file mode 100644 index 00000000..a4292b70 --- /dev/null +++ b/modules/local/sra_merge_samplesheet/tests/tags.yml @@ -0,0 +1,2 @@ +sra_merge_samplesheet: + - modules/local/sra_merge_samplesheet/** diff --git a/modules/local/sra_runinfo_to_ftp/nextflow.config b/modules/local/sra_runinfo_to_ftp/nextflow.config new file mode 100644 index 00000000..aee1848c --- /dev/null +++ b/modules/local/sra_runinfo_to_ftp/nextflow.config @@ -0,0 +1,9 @@ +process { + withName: 'SRA_RUNINFO_TO_FTP' { + publishDir = [ + path: { "${params.outdir}/metadata" }, + mode: params.publish_dir_mode, + saveAs: { filename -> filename.equals('versions.yml') ? null : filename } + ] + } +} \ No newline at end of file diff --git a/modules/local/sra_runinfo_to_ftp/tests/main.nf.test b/modules/local/sra_runinfo_to_ftp/tests/main.nf.test new file mode 100644 index 00000000..3b1405f9 --- /dev/null +++ b/modules/local/sra_runinfo_to_ftp/tests/main.nf.test @@ -0,0 +1,31 @@ +nextflow_process { + + name "Test process: SRA_RUNINFO_TO_FTP" + script "../main.nf" + process "SRA_RUNINFO_TO_FTP" + tag "modules" + tag "modules_local" + tag "sra_runinfo_to_ftp" + + test("Should run without failures") { + + when { + params { + outdir = "$outputDir" + } + + process { + """ + input[0] = file(params.test_data['modules_local']['sra_runinfo_to_ftp'], checkIfExists: true) + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } +} diff --git a/modules/local/sra_runinfo_to_ftp/tests/main.nf.test.snap b/modules/local/sra_runinfo_to_ftp/tests/main.nf.test.snap new file mode 100644 index 00000000..cd246303 --- /dev/null +++ 
b/modules/local/sra_runinfo_to_ftp/tests/main.nf.test.snap @@ -0,0 +1,21 @@ +{ + "Should run without failures": { + "content": [ + { + "0": [ + "SRR13191702.runinfo_ftp.tsv:md5,94378c448c044b3e20e5c54e442ab62e" + ], + "1": [ + "versions.yml:md5,e95f8185f665127a73622a19d321bcca" + ], + "tsv": [ + "SRR13191702.runinfo_ftp.tsv:md5,94378c448c044b3e20e5c54e442ab62e" + ], + "versions": [ + "versions.yml:md5,e95f8185f665127a73622a19d321bcca" + ] + } + ], + "timestamp": "2023-09-22T10:58:36.16611" + } +} \ No newline at end of file diff --git a/modules/local/sra_runinfo_to_ftp/tests/tags.yml b/modules/local/sra_runinfo_to_ftp/tests/tags.yml new file mode 100644 index 00000000..0987a1e6 --- /dev/null +++ b/modules/local/sra_runinfo_to_ftp/tests/tags.yml @@ -0,0 +1,2 @@ +sra_runinfo_to_ftp: + - modules/local/sra_runinfo_to_ftp/** diff --git a/modules/local/sra_to_samplesheet/nextflow.config b/modules/local/sra_to_samplesheet/nextflow.config new file mode 100644 index 00000000..da241c1a --- /dev/null +++ b/modules/local/sra_to_samplesheet/nextflow.config @@ -0,0 +1,8 @@ +process { + withName: SRA_TO_SAMPLESHEET { + publishDir = [ + path: { "${params.outdir}/samplesheet" }, + enabled: false + ] + } +} diff --git a/modules/local/sra_to_samplesheet/tests/main.nf.test b/modules/local/sra_to_samplesheet/tests/main.nf.test new file mode 100644 index 00000000..6094717b --- /dev/null +++ b/modules/local/sra_to_samplesheet/tests/main.nf.test @@ -0,0 +1,34 @@ +nextflow_process { + + name "Test process: SRA_TO_SAMPLESHEET" + script "../main.nf" + process "SRA_TO_SAMPLESHEET" + tag "modules" + tag "modules_local" + tag "sra_to_samplesheet" + + test("Should run without failures") { + + when { + params { + outdir = "$outputDir" + } + + process { + """ + input[0] = [id:'ERX1188904_ERR1109373', run_accession:'ERR1109373', experiment_accession:'ERX1188904', sample_accession:'SAMEA3643867', experiment_alias:'ena-EXPERIMENT-CAM-03-11-2015-17:01:52:847-7', 
run_alias:'ena-RUN-CAM-03-11-2015-17:01:52:847-7', sample_alias:'sample_56', study_alias:'ena-STUDY-CAM-02-11-2015-17:42:24:189-13', library_layout:'PAIRED', experiment_title:'Illumina HiSeq 2500 paired end sequencing', sample_title:'RNA-Seq reads mapped onto L. Boulardi Toti-like virus genome', sample_description:'RNA-Seq reads mapped onto L. Boulardi Toti-like virus genome', fastq_md5:'8d7d7b854d0207d1226477a30103fade;9fd57225d6c07a31843276d6df9b15c0;5a62e8f785687dce890cfb4fe3e607f9', fastq_ftp:'ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373.fastq.gz;ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373_1.fastq.gz;ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373_2.fastq.gz', fastq_1:'./results/fastq/ERX1188904_ERR1109373_1.fastq.gz', fastq_2:'./results/fastq/ERX1188904_ERR1109373_2.fastq.gz', md5_1:'9fd57225d6c07a31843276d6df9b15c0', md5_2:'5a62e8f785687dce890cfb4fe3e607f9', single_end:false] + input[1] = 'rnaseq' + input[2] = 'auto' + input[3] = 'experiment_accession,run_accession,sample_accession,experiment_alias,run_alias,sample_alias,experiment_title,sample_title,sample_description' + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } +} diff --git a/modules/local/sra_to_samplesheet/tests/main.nf.test.snap b/modules/local/sra_to_samplesheet/tests/main.nf.test.snap new file mode 100644 index 00000000..8cc8686a --- /dev/null +++ b/modules/local/sra_to_samplesheet/tests/main.nf.test.snap @@ -0,0 +1,113 @@ +{ + "Should run without failures": { + "content": [ + { + "0": [ + [ + { + "id": "ERX1188904_ERR1109373", + "run_accession": "ERR1109373", + "experiment_accession": "ERX1188904", + "sample_accession": "SAMEA3643867", + "experiment_alias": "ena-EXPERIMENT-CAM-03-11-2015-17:01:52:847-7", + "run_alias": "ena-RUN-CAM-03-11-2015-17:01:52:847-7", + "sample_alias": "sample_56", + "study_alias": "ena-STUDY-CAM-02-11-2015-17:42:24:189-13", + "library_layout": 
"PAIRED", + "experiment_title": "Illumina HiSeq 2500 paired end sequencing", + "sample_title": "RNA-Seq reads mapped onto L. Boulardi Toti-like virus genome", + "sample_description": "RNA-Seq reads mapped onto L. Boulardi Toti-like virus genome", + "fastq_md5": "8d7d7b854d0207d1226477a30103fade;9fd57225d6c07a31843276d6df9b15c0;5a62e8f785687dce890cfb4fe3e607f9", + "fastq_ftp": "ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373.fastq.gz;ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373_1.fastq.gz;ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373_2.fastq.gz", + "fastq_1": "./results/fastq/ERX1188904_ERR1109373_1.fastq.gz", + "fastq_2": "./results/fastq/ERX1188904_ERR1109373_2.fastq.gz", + "md5_1": "9fd57225d6c07a31843276d6df9b15c0", + "md5_2": "5a62e8f785687dce890cfb4fe3e607f9", + "single_end": false + }, + "ERX1188904_ERR1109373.samplesheet.csv:md5,e7898191d57258e049ee7129d36f5c08" + ] + ], + "1": [ + [ + { + "id": "ERX1188904_ERR1109373", + "run_accession": "ERR1109373", + "experiment_accession": "ERX1188904", + "sample_accession": "SAMEA3643867", + "experiment_alias": "ena-EXPERIMENT-CAM-03-11-2015-17:01:52:847-7", + "run_alias": "ena-RUN-CAM-03-11-2015-17:01:52:847-7", + "sample_alias": "sample_56", + "study_alias": "ena-STUDY-CAM-02-11-2015-17:42:24:189-13", + "library_layout": "PAIRED", + "experiment_title": "Illumina HiSeq 2500 paired end sequencing", + "sample_title": "RNA-Seq reads mapped onto L. Boulardi Toti-like virus genome", + "sample_description": "RNA-Seq reads mapped onto L. 
Boulardi Toti-like virus genome", + "fastq_md5": "8d7d7b854d0207d1226477a30103fade;9fd57225d6c07a31843276d6df9b15c0;5a62e8f785687dce890cfb4fe3e607f9", + "fastq_ftp": "ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373.fastq.gz;ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373_1.fastq.gz;ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373_2.fastq.gz", + "fastq_1": "./results/fastq/ERX1188904_ERR1109373_1.fastq.gz", + "fastq_2": "./results/fastq/ERX1188904_ERR1109373_2.fastq.gz", + "md5_1": "9fd57225d6c07a31843276d6df9b15c0", + "md5_2": "5a62e8f785687dce890cfb4fe3e607f9", + "single_end": false + }, + "ERX1188904_ERR1109373.mappings.csv:md5,d09ddb4f0709675e5dfe1eadf12c608f" + ] + ], + "mappings": [ + [ + { + "id": "ERX1188904_ERR1109373", + "run_accession": "ERR1109373", + "experiment_accession": "ERX1188904", + "sample_accession": "SAMEA3643867", + "experiment_alias": "ena-EXPERIMENT-CAM-03-11-2015-17:01:52:847-7", + "run_alias": "ena-RUN-CAM-03-11-2015-17:01:52:847-7", + "sample_alias": "sample_56", + "study_alias": "ena-STUDY-CAM-02-11-2015-17:42:24:189-13", + "library_layout": "PAIRED", + "experiment_title": "Illumina HiSeq 2500 paired end sequencing", + "sample_title": "RNA-Seq reads mapped onto L. Boulardi Toti-like virus genome", + "sample_description": "RNA-Seq reads mapped onto L. 
Boulardi Toti-like virus genome", + "fastq_md5": "8d7d7b854d0207d1226477a30103fade;9fd57225d6c07a31843276d6df9b15c0;5a62e8f785687dce890cfb4fe3e607f9", + "fastq_ftp": "ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373.fastq.gz;ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373_1.fastq.gz;ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373_2.fastq.gz", + "fastq_1": "./results/fastq/ERX1188904_ERR1109373_1.fastq.gz", + "fastq_2": "./results/fastq/ERX1188904_ERR1109373_2.fastq.gz", + "md5_1": "9fd57225d6c07a31843276d6df9b15c0", + "md5_2": "5a62e8f785687dce890cfb4fe3e607f9", + "single_end": false + }, + "ERX1188904_ERR1109373.mappings.csv:md5,d09ddb4f0709675e5dfe1eadf12c608f" + ] + ], + "samplesheet": [ + [ + { + "id": "ERX1188904_ERR1109373", + "run_accession": "ERR1109373", + "experiment_accession": "ERX1188904", + "sample_accession": "SAMEA3643867", + "experiment_alias": "ena-EXPERIMENT-CAM-03-11-2015-17:01:52:847-7", + "run_alias": "ena-RUN-CAM-03-11-2015-17:01:52:847-7", + "sample_alias": "sample_56", + "study_alias": "ena-STUDY-CAM-02-11-2015-17:42:24:189-13", + "library_layout": "PAIRED", + "experiment_title": "Illumina HiSeq 2500 paired end sequencing", + "sample_title": "RNA-Seq reads mapped onto L. Boulardi Toti-like virus genome", + "sample_description": "RNA-Seq reads mapped onto L. 
Boulardi Toti-like virus genome", + "fastq_md5": "8d7d7b854d0207d1226477a30103fade;9fd57225d6c07a31843276d6df9b15c0;5a62e8f785687dce890cfb4fe3e607f9", + "fastq_ftp": "ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373.fastq.gz;ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373_1.fastq.gz;ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373_2.fastq.gz", + "fastq_1": "./results/fastq/ERX1188904_ERR1109373_1.fastq.gz", + "fastq_2": "./results/fastq/ERX1188904_ERR1109373_2.fastq.gz", + "md5_1": "9fd57225d6c07a31843276d6df9b15c0", + "md5_2": "5a62e8f785687dce890cfb4fe3e607f9", + "single_end": false + }, + "ERX1188904_ERR1109373.samplesheet.csv:md5,e7898191d57258e049ee7129d36f5c08" + ] + ] + } + ], + "timestamp": "2023-09-22T10:58:27.169349" + } +} \ No newline at end of file diff --git a/modules/local/sra_to_samplesheet/tests/tags.yml b/modules/local/sra_to_samplesheet/tests/tags.yml new file mode 100644 index 00000000..2f2d527b --- /dev/null +++ b/modules/local/sra_to_samplesheet/tests/tags.yml @@ -0,0 +1,2 @@ +sra_to_samplesheet: + - modules/local/sra_to_samplesheet/** diff --git a/modules/local/synapse_get/nextflow.config b/modules/local/synapse_get/nextflow.config new file mode 100644 index 00000000..869f80aa --- /dev/null +++ b/modules/local/synapse_get/nextflow.config @@ -0,0 +1,16 @@ +process { + withName: 'SYNAPSE_GET' { + publishDir = [ + [ + path: { "${params.outdir}/fastq" }, + mode: params.publish_dir_mode, + pattern: "*.fastq.gz" + ], + [ + path: { "${params.outdir}/fastq/md5" }, + mode: params.publish_dir_mode, + pattern: "*.md5" + ] + ] + } +} \ No newline at end of file diff --git a/modules/local/synapse_list/nextflow.config b/modules/local/synapse_list/nextflow.config new file mode 100644 index 00000000..15124234 --- /dev/null +++ b/modules/local/synapse_list/nextflow.config @@ -0,0 +1,10 @@ +process { + withName: SYNAPSE_LIST { + ext.args = '--long' + publishDir = [ + path: { "${params.outdir}/metadata" }, + mode: 
params.publish_dir_mode, + saveAs: { filename -> filename.equals('versions.yml') ? null : filename } + ] + } +} diff --git a/modules/local/synapse_merge_samplesheet/nextflow.config b/modules/local/synapse_merge_samplesheet/nextflow.config new file mode 100644 index 00000000..9befef9c --- /dev/null +++ b/modules/local/synapse_merge_samplesheet/nextflow.config @@ -0,0 +1,9 @@ +process { + withName: SYNAPSE_MERGE_SAMPLESHEET { + publishDir = [ + path: { "${params.outdir}/samplesheet" }, + mode: params.publish_dir_mode, + saveAs: { filename -> filename.equals('versions.yml') ? null : filename } + ] + } +} \ No newline at end of file diff --git a/modules/local/synapse_show/nextflow.config b/modules/local/synapse_show/nextflow.config new file mode 100644 index 00000000..a1864dfe --- /dev/null +++ b/modules/local/synapse_show/nextflow.config @@ -0,0 +1,9 @@ +process { + withName: 'SYNAPSE_SHOW' { + publishDir = [ + path: { "${params.outdir}/metadata" }, + mode: params.publish_dir_mode, + saveAs: { filename -> filename.equals('versions.yml') ? 
null : filename } + ] + } +} \ No newline at end of file diff --git a/modules/local/synapse_to_samplesheet/nextflow.config b/modules/local/synapse_to_samplesheet/nextflow.config new file mode 100644 index 00000000..b286c12c --- /dev/null +++ b/modules/local/synapse_to_samplesheet/nextflow.config @@ -0,0 +1,8 @@ +process { + withName: SYNAPSE_TO_SAMPLESHEET { + publishDir = [ + path: { "${params.outdir}/samplesheet" }, + enabled: false + ] + } +} \ No newline at end of file diff --git a/modules/nf-core/custom/dumpsoftwareversions/main.nf b/modules/nf-core/custom/dumpsoftwareversions/main.nf deleted file mode 100644 index c9d014b1..00000000 --- a/modules/nf-core/custom/dumpsoftwareversions/main.nf +++ /dev/null @@ -1,24 +0,0 @@ -process CUSTOM_DUMPSOFTWAREVERSIONS { - label 'process_single' - - // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container - conda "bioconda::multiqc=1.15" - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/multiqc:1.15--pyhdfd78af_0' : - 'biocontainers/multiqc:1.15--pyhdfd78af_0' }" - - input: - path versions - - output: - path "software_versions.yml" , emit: yml - path "software_versions_mqc.yml", emit: mqc_yml - path "versions.yml" , emit: versions - - when: - task.ext.when == null || task.ext.when - - script: - def args = task.ext.args ?: '' - template 'dumpsoftwareversions.py' -} diff --git a/modules/nf-core/custom/dumpsoftwareversions/meta.yml b/modules/nf-core/custom/dumpsoftwareversions/meta.yml deleted file mode 100644 index c32657de..00000000 --- a/modules/nf-core/custom/dumpsoftwareversions/meta.yml +++ /dev/null @@ -1,36 +0,0 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/yaml-schema.json -name: custom_dumpsoftwareversions -description: Custom module used to dump software versions within the nf-core pipeline template -keywords: - - custom - - dump - - version -tools: - - custom: - description: Custom module used to dump software versions within the nf-core pipeline template - homepage: https://github.com/nf-core/tools - documentation: https://github.com/nf-core/tools - licence: ["MIT"] -input: - - versions: - type: file - description: YML file containing software versions - pattern: "*.yml" - -output: - - yml: - type: file - description: Standard YML file containing software versions - pattern: "software_versions.yml" - - mqc_yml: - type: file - description: MultiQC custom content YML file containing software versions - pattern: "software_versions_mqc.yml" - - versions: - type: file - description: File containing software versions - pattern: "versions.yml" - -authors: - - "@drpatelh" - - "@grst" diff --git a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py deleted file mode 100755 index da033408..00000000 --- 
a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py +++ /dev/null @@ -1,101 +0,0 @@ -#!/usr/bin/env python - - -"""Provide functions to merge multiple versions.yml files.""" - - -import yaml -import platform -from textwrap import dedent - - -def _make_versions_html(versions): - """Generate a tabular HTML output of all versions for MultiQC.""" - html = [ - dedent( - """\\ - - - - - - - - - - """ - ) - ] - for process, tmp_versions in sorted(versions.items()): - html.append("") - for i, (tool, version) in enumerate(sorted(tmp_versions.items())): - html.append( - dedent( - f"""\\ - - - - - - """ - ) - ) - html.append("") - html.append("
Process Name Software Version
{process if (i == 0) else ''}{tool}{version}
") - return "\\n".join(html) - - -def main(): - """Load all version files and generate merged output.""" - versions_this_module = {} - versions_this_module["${task.process}"] = { - "python": platform.python_version(), - "yaml": yaml.__version__, - } - - with open("$versions") as f: - versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module - - # aggregate versions by the module name (derived from fully-qualified process name) - versions_by_module = {} - for process, process_versions in versions_by_process.items(): - module = process.split(":")[-1] - try: - if versions_by_module[module] != process_versions: - raise AssertionError( - "We assume that software versions are the same between all modules. " - "If you see this error-message it means you discovered an edge-case " - "and should open an issue in nf-core/tools. " - ) - except KeyError: - versions_by_module[module] = process_versions - - versions_by_module["Workflow"] = { - "Nextflow": "$workflow.nextflow.version", - "$workflow.manifest.name": "$workflow.manifest.version", - } - - versions_mqc = { - "id": "software_versions", - "section_name": "${workflow.manifest.name} Software Versions", - "section_href": "https://github.com/${workflow.manifest.name}", - "plot_type": "html", - "description": "are collected at run time from the software output.", - "data": _make_versions_html(versions_by_module), - } - - with open("software_versions.yml", "w") as f: - yaml.dump(versions_by_module, f, default_flow_style=False) - with open("software_versions_mqc.yml", "w") as f: - yaml.dump(versions_mqc, f, default_flow_style=False) - - with open("versions.yml", "w") as f: - yaml.dump(versions_this_module, f, default_flow_style=False) - - -if __name__ == "__main__": - main() diff --git a/modules/nf-core/custom/sratoolsncbisettings/nextflow.config b/modules/nf-core/custom/sratoolsncbisettings/nextflow.config new file mode 100644 index 00000000..e69de29b diff --git 
a/modules/nf-core/custom/sratoolsncbisettings/tests/main.nf.test b/modules/nf-core/custom/sratoolsncbisettings/tests/main.nf.test new file mode 100644 index 00000000..ec4a7797 --- /dev/null +++ b/modules/nf-core/custom/sratoolsncbisettings/tests/main.nf.test @@ -0,0 +1,114 @@ +nextflow_process { + + name "Test Process CUSTOM_SRATOOLSNCBISETTINGS" + script "../main.nf" + process "CUSTOM_SRATOOLSNCBISETTINGS" + config "modules/nf-core/custom/sratoolsncbisettings/tests/nextflow.config" + tag "modules" + tag "modules_nfcore" + tag "custom" + tag "custom/sratoolsncbisettings" + + test("Should run without failures") { + + when { + params { + settings_path = '/tmp/.ncbi' + settings_file = "${params.settings_path}/user-settings.mkfg" + } + + process { + """ + file(params.settings_path).mkdirs() + def settings = file(params.test_data['generic']['config']['ncbi_user_settings'], checkIfExists: true) + settings.copyTo(params.settings_file) + """ + } + } + + then { + assert process.success + assert snapshot( + process.out.versions + ).match() + + with(process.out.ncbi_settings) { + assert path(get(0)).readLines().any { it.contains('/LIBS/GUID') } + assert path(get(0)).readLines().any { it.contains('/libs/cloud/report_instance_identity') } + } + } + + } + + test("Should fail") { + + when { + params { + settings_path = '/tmp/.ncbi' + settings_file = "${params.settings_path}/user-settings.mkfg" + } + + process { + """ + file(params.settings_path).mkdirs() + def settings = file(params.settings_file) + settings.text = ''' + ## auto-generated configuration file - DO NOT EDIT ## + config/default = "false" + /repository/remote/main/CGI/resolver-cgi = "https://trace.ncbi.nlm.nih.gov/Traces/names/names.fcgi" + /repository/remote/protected/CGI/resolver-cgi = "https://trace.ncbi.nlm.nih.gov/Traces/names/names.fcgi" + /repository/user/ad/public/apps/file/volumes/flatAd = "." + /repository/user/ad/public/apps/refseq/volumes/refseqAd = "." 
+ /repository/user/ad/public/apps/sra/volumes/sraAd = "." + /repository/user/ad/public/apps/sraPileup/volumes/ad = "." + /repository/user/ad/public/apps/sraRealign/volumes/ad = "." + /repository/user/ad/public/apps/wgs/volumes/wgsAd = "." + /repository/user/ad/public/root = "." + /repository/user/default-path = "/root/ncbi" + '''.stripIndent() + """ + } + } + + then { + assert process.failed + assert snapshot( + process.out.versions + ).match() + assert process.stdout.any { it.contains('Command error:') } + assert process.stdout.any { it.contains('missing the required entries') } + assert process.stdout.any { it.contains('/LIBS/GUID') } + assert process.stdout.any { it.contains('/libs/cloud/report_instance_identity') } + } + + } + + test("Should run with nonexisting") { + + when { + params { + settings_path = '/tmp/.ncbi' + settings_file = "${params.settings_path}/user-settings.mkfg" + } + + process { + """ + def settings = file(params.settings_file) + settings.delete() + """ + } + } + + then { + assert process.success + assert snapshot(process.out.versions).match() + + with(process.out.ncbi_settings) { + { assert path(get(0)).readLines().any { it.contains('/LIBS/GUID') } } + { assert path(get(0)).readLines().any { it.contains('/libs/cloud/report_instance_identity') } } + } + } + + } + +} diff --git a/modules/nf-core/custom/sratoolsncbisettings/tests/main.nf.test.snap b/modules/nf-core/custom/sratoolsncbisettings/tests/main.nf.test.snap new file mode 100644 index 00000000..edb3b351 --- /dev/null +++ b/modules/nf-core/custom/sratoolsncbisettings/tests/main.nf.test.snap @@ -0,0 +1,18 @@ +{ + "Should run with nonexisting": { + "content": [ + [ + "versions.yml:md5,fec13b593c3b42ddd38f2fc77df25b70" + ] + ], + "timestamp": "2023-10-12T12:24:24.023849" + }, + "Should run without failures": { + "content": [ + [ + "versions.yml:md5,fec13b593c3b42ddd38f2fc77df25b70" + ] + ], + "timestamp": "2023-10-12T10:40:51.717351" + } +} \ No newline at end of file diff --git 
a/modules/nf-core/custom/sratoolsncbisettings/tests/nextflow.config b/modules/nf-core/custom/sratoolsncbisettings/tests/nextflow.config new file mode 100644 index 00000000..c4a96e94 --- /dev/null +++ b/modules/nf-core/custom/sratoolsncbisettings/tests/nextflow.config @@ -0,0 +1,16 @@ +params.settings_path = '/tmp/.ncbi' +params.settings_file = "${params.settings_path}/user-settings.mkfg" + +env.NCBI_SETTINGS = params.settings_file + +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + withName: CUSTOM_SRATOOLSNCBISETTINGS { + containerOptions = { + (workflow.containerEngine == 'singularity') ? + "-B ${params.settings_path}:${params.settings_path}" : + "-v ${params.settings_path}:${params.settings_path}" + } + } +} \ No newline at end of file diff --git a/modules/nf-core/custom/sratoolsncbisettings/tests/tags.yml b/modules/nf-core/custom/sratoolsncbisettings/tests/tags.yml new file mode 100644 index 00000000..fb4a08a7 --- /dev/null +++ b/modules/nf-core/custom/sratoolsncbisettings/tests/tags.yml @@ -0,0 +1,2 @@ +custom/sratoolsncbisettings: + - modules/nf-core/custom/sratoolsncbisettings/** diff --git a/modules/nf-core/sratools/fasterqdump/nextflow.config b/modules/nf-core/sratools/fasterqdump/nextflow.config new file mode 100644 index 00000000..f98b140d --- /dev/null +++ b/modules/nf-core/sratools/fasterqdump/nextflow.config @@ -0,0 +1,10 @@ +process { + withName: SRATOOLS_FASTERQDUMP { + ext.args = '--split-files --include-technical' + publishDir = [ + path: { "${params.outdir}/fastq" }, + mode: params.publish_dir_mode, + pattern: "*.fastq.gz" + ] + } +} \ No newline at end of file diff --git a/modules/nf-core/sratools/fasterqdump/sratools-fasterqdump.diff b/modules/nf-core/sratools/fasterqdump/sratools-fasterqdump.diff new file mode 100644 index 00000000..5275d3e4 --- /dev/null +++ b/modules/nf-core/sratools/fasterqdump/sratools-fasterqdump.diff @@ -0,0 +1,36 @@ +Changes in module 
'nf-core/sratools/fasterqdump' +--- /dev/null ++++ modules/nf-core/sratools/fasterqdump/nextflow.config +@@ -0,0 +1,10 @@ ++process { ++ withName: SRATOOLS_FASTERQDUMP { ++ ext.args = '--split-files --include-technical' ++ publishDir = [ ++ path: { "${params.outdir}/fastq" }, ++ mode: params.publish_dir_mode, ++ pattern: "*.fastq.gz" ++ ] ++ } ++} +--- modules/nf-core/sratools/fasterqdump/tests/main.nf.test.snap ++++ modules/nf-core/sratools/fasterqdump/tests/main.nf.test.snap +@@ -8,7 +8,7 @@ + "id": "test_single_end", + "single_end": true + }, +- "test_single_end.fastq.gz:md5,1054c7b71884acdb5eed8a378f18be82" ++ "test_single_end_1.fastq.gz:md5,1054c7b71884acdb5eed8a378f18be82" + ] + ], + "1": [ +@@ -20,7 +20,7 @@ + "id": "test_single_end", + "single_end": true + }, +- "test_single_end.fastq.gz:md5,1054c7b71884acdb5eed8a378f18be82" ++ "test_single_end_1.fastq.gz:md5,1054c7b71884acdb5eed8a378f18be82" + ] + ], + "versions": [ + +************************************************************ diff --git a/modules/nf-core/sratools/fasterqdump/tests/main.nf.test b/modules/nf-core/sratools/fasterqdump/tests/main.nf.test new file mode 100644 index 00000000..ee4c5844 --- /dev/null +++ b/modules/nf-core/sratools/fasterqdump/tests/main.nf.test @@ -0,0 +1,70 @@ +nextflow_process { + name "Test Process SRATOOLS_FASTERQDUMP" + script "../main.nf" + process "SRATOOLS_FASTERQDUMP" + tag "modules" + tag "modules_nfcore" + tag "sratools/fasterqdump" + + test("Single-end") { + + setup { + run("UNTAR") { + script "modules/nf-core/untar/main.nf" + process { + """ + input[0] = Channel.of([ [], file(params.test_data['sarscov2']['illumina']['SRR13255544_tar_gz'], checkIfExists: true) ]) + """ + } + } + } + + when { + process { + """ + input[0] = UNTAR.out.untar.collect{ meta, files -> files }.map{ files -> [ [ id:'test_single_end', single_end:true ], files]} + input[1] = file(params.test_data['generic']['config']['ncbi_user_settings'], checkIfExists: true) + input[2] = [] + """ + } + } + + 
then { + assertAll( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } + + test("Paired-end") { + + setup { + run("UNTAR") { + script "modules/nf-core/untar/main.nf" + process { + """ + input[0] = Channel.of([ [], file(params.test_data['sarscov2']['illumina']['SRR11140744_tar_gz'], checkIfExists: true) ]) + """ + } + } + } + + when { + process { + """ + input[0] = UNTAR.out.untar.collect{ meta, files -> files }.map{ files -> [ [ id:'test_paired_end', single_end:false ], files]} + input[1] = file(params.test_data['generic']['config']['ncbi_user_settings'], checkIfExists: true) + input[2] = [] + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } +} diff --git a/modules/nf-core/sratools/fasterqdump/tests/main.nf.test.snap b/modules/nf-core/sratools/fasterqdump/tests/main.nf.test.snap new file mode 100644 index 00000000..77fd6c79 --- /dev/null +++ b/modules/nf-core/sratools/fasterqdump/tests/main.nf.test.snap @@ -0,0 +1,70 @@ +{ + "Single-end": { + "content": [ + { + "0": [ + [ + { + "id": "test_single_end", + "single_end": true + }, + "test_single_end_1.fastq.gz:md5,1054c7b71884acdb5eed8a378f18be82" + ] + ], + "1": [ + "versions.yml:md5,a3d61a9761e1606ef8459f0b68821d7a" + ], + "reads": [ + [ + { + "id": "test_single_end", + "single_end": true + }, + "test_single_end_1.fastq.gz:md5,1054c7b71884acdb5eed8a378f18be82" + ] + ], + "versions": [ + "versions.yml:md5,a3d61a9761e1606ef8459f0b68821d7a" + ] + } + ], + "timestamp": "2023-10-10T18:11:12.87053" + }, + "Paired-end": { + "content": [ + { + "0": [ + [ + { + "id": "test_paired_end", + "single_end": false + }, + [ + "test_paired_end_1.fastq.gz:md5,193809c784a4ea132ab2a253fa4f55b6", + "test_paired_end_2.fastq.gz:md5,3e3b3af3413f50a1685fd7b3f1456d4e" + ] + ] + ], + "1": [ + "versions.yml:md5,a3d61a9761e1606ef8459f0b68821d7a" + ], + "reads": [ + [ + { + "id": "test_paired_end", + "single_end": false + }, + [ + 
"test_paired_end_1.fastq.gz:md5,193809c784a4ea132ab2a253fa4f55b6", + "test_paired_end_2.fastq.gz:md5,3e3b3af3413f50a1685fd7b3f1456d4e" + ] + ] + ], + "versions": [ + "versions.yml:md5,a3d61a9761e1606ef8459f0b68821d7a" + ] + } + ], + "timestamp": "2023-10-10T18:11:50.928006" + } +} \ No newline at end of file diff --git a/modules/nf-core/sratools/fasterqdump/tests/tags.yml b/modules/nf-core/sratools/fasterqdump/tests/tags.yml new file mode 100644 index 00000000..5d1ddcb3 --- /dev/null +++ b/modules/nf-core/sratools/fasterqdump/tests/tags.yml @@ -0,0 +1,2 @@ +sratools/fasterqdump: + - modules/nf-core/sratools/fasterqdump/** diff --git a/modules/nf-core/sratools/prefetch/nextflow.config b/modules/nf-core/sratools/prefetch/nextflow.config new file mode 100644 index 00000000..a2ca8848 --- /dev/null +++ b/modules/nf-core/sratools/prefetch/nextflow.config @@ -0,0 +1,8 @@ +process { + withName: SRATOOLS_PREFETCH { + publishDir = [ + path: { "${params.outdir}/sra" }, + enabled: false + ] + } +} \ No newline at end of file diff --git a/modules/nf-core/sratools/prefetch/tests/main.nf.test b/modules/nf-core/sratools/prefetch/tests/main.nf.test new file mode 100644 index 00000000..fcf4036d --- /dev/null +++ b/modules/nf-core/sratools/prefetch/tests/main.nf.test @@ -0,0 +1,54 @@ +nextflow_process { + name "Test Process SRATOOLS_PREFETCH" + script "../main.nf" + process "SRATOOLS_PREFETCH" + tag "modules" + tag "modules_nfcore" + tag "sratools/prefetch" + + test("sratools/prefetch") { + + when { + params { + outdir = "output" + } + process { + """ + input[0] = Channel.of([ [ id:'test', single_end:false ], 'DRR000774' ]) + input[1] = file(params.test_data['generic']['config']['ncbi_user_settings'], checkIfExists: true) + input[2] = [] + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } + + test("sratools/prefetch with sralite") { + + when { + params { + outdir = "output" + } + process { + """ + input[0] = 
Channel.of([ [ id:'test', single_end:false ], 'SRR1170046' ]) + input[1] = file(params.test_data['generic']['config']['ncbi_user_settings'], checkIfExists: true) + input[2] = [] + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } +} diff --git a/modules/nf-core/sratools/prefetch/tests/main.nf.test.snap b/modules/nf-core/sratools/prefetch/tests/main.nf.test.snap new file mode 100644 index 00000000..ab1d2088 --- /dev/null +++ b/modules/nf-core/sratools/prefetch/tests/main.nf.test.snap @@ -0,0 +1,72 @@ +{ + "sratools/prefetch with sralite": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + [ + "SRR1170046.sralite:md5,7acfce556ca0951aff49d780899c105b" + ] + ] + ], + "1": [ + "versions.yml:md5,c967dea4135cb75490e1e801c4639efc" + ], + "sra": [ + [ + { + "id": "test", + "single_end": false + }, + [ + "SRR1170046.sralite:md5,7acfce556ca0951aff49d780899c105b" + ] + ] + ], + "versions": [ + "versions.yml:md5,c967dea4135cb75490e1e801c4639efc" + ] + } + ], + "timestamp": "2023-10-13T12:11:24.563510389" + }, + "sratools/prefetch": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + [ + "DRR000774.sra:md5,7647dba20c89c0e3d7ad13842f060eb0" + ] + ] + ], + "1": [ + "versions.yml:md5,c967dea4135cb75490e1e801c4639efc" + ], + "sra": [ + [ + { + "id": "test", + "single_end": false + }, + [ + "DRR000774.sra:md5,7647dba20c89c0e3d7ad13842f060eb0" + ] + ] + ], + "versions": [ + "versions.yml:md5,c967dea4135cb75490e1e801c4639efc" + ] + } + ], + "timestamp": "2023-10-13T12:11:02.75256571" + } +} \ No newline at end of file diff --git a/modules/nf-core/sratools/prefetch/tests/tags.yml b/modules/nf-core/sratools/prefetch/tests/tags.yml new file mode 100644 index 00000000..52110bfd --- /dev/null +++ b/modules/nf-core/sratools/prefetch/tests/tags.yml @@ -0,0 +1,2 @@ +sratools/prefetch: + - modules/nf-core/sratools/prefetch/** diff --git 
a/modules/nf-core/untar/main.nf b/modules/nf-core/untar/main.nf new file mode 100644 index 00000000..61461c39 --- /dev/null +++ b/modules/nf-core/untar/main.nf @@ -0,0 +1,63 @@ +process UNTAR { + tag "$archive" + label 'process_single' + + conda "conda-forge::sed=4.7 conda-forge::grep=3.11 conda-forge::tar=1.34" + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ubuntu:20.04' : + 'nf-core/ubuntu:20.04' }" + + input: + tuple val(meta), path(archive) + + output: + tuple val(meta), path("$prefix"), emit: untar + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + prefix = task.ext.prefix ?: ( meta.id ? "${meta.id}" : archive.baseName.toString().replaceFirst(/\.tar$/, "")) + + """ + mkdir $prefix + + ## Ensures --strip-components only applied when top level of tar contents is a directory + ## If just files or multiple directories, place all in prefix + if [[ \$(tar -taf ${archive} | grep -o -P "^.*?\\/" | uniq | wc -l) -eq 1 ]]; then + tar \\ + -C $prefix --strip-components 1 \\ + -xavf \\ + $args \\ + $archive \\ + $args2 + else + tar \\ + -C $prefix \\ + -xavf \\ + $args \\ + $archive \\ + $args2 + fi + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + untar: \$(echo \$(tar --version 2>&1) | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//') + END_VERSIONS + """ + + stub: + prefix = task.ext.prefix ?: ( meta.id ? 
"${meta.id}" : archive.toString().replaceFirst(/\.[^\.]+(.gz)?$/, "")) + """ + mkdir $prefix + touch ${prefix}/file.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + untar: \$(echo \$(tar --version 2>&1) | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//') + END_VERSIONS + """ +} diff --git a/modules/nf-core/untar/meta.yml b/modules/nf-core/untar/meta.yml new file mode 100644 index 00000000..db241a6e --- /dev/null +++ b/modules/nf-core/untar/meta.yml @@ -0,0 +1,41 @@ +name: untar +description: Extract files. +keywords: + - untar + - uncompress + - extract +tools: + - untar: + description: | + Extract tar.gz files. + documentation: https://www.gnu.org/software/tar/manual/ + licence: ["GPL-3.0-or-later"] +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - archive: + type: file + description: File to be untar + pattern: "*.{tar}.{gz}" +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - untar: + type: directory + description: Directory containing contents of archive + pattern: "*/" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - "@joseespinosa" + - "@drpatelh" + - "@matthdsm" + - "@jfy133" diff --git a/nextflow.config b/nextflow.config index 3877ed26..f9f7962a 100644 --- a/nextflow.config +++ b/nextflow.config @@ -49,8 +49,8 @@ params { // Schema validation default options validationFailUnrecognisedParams = false validationLenientMode = false - validationSchemaIgnoreParams = 'genomes' validationShowHiddenParams = false + validationSchemaIgnoreParams = '' validate_params = true } @@ -65,6 +65,13 @@ try { System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config") } +// Workflow specific configs +if (params.input_type == 'sra') { + includeConfig './workflows/sra/nextflow.config' +} else if (params.input_type == 'synapse') { + includeConfig './workflows/synapse/nextflow.config' +} + // Load nf-core/fetchngs custom profiles from different institutions. // Warning: Uncomment only if a pipeline-specific instititutional config already exists on nf-core/configs! 
// try { @@ -163,7 +170,10 @@ profiles { executor.cpus = 4 executor.memory = 8.GB } - test { includeConfig 'conf/test.config' } + test { + includeConfig 'conf/test.config' + includeConfig 'conf/test_data.config' + } test_synapse { includeConfig 'conf/test_synapse.config' } test_full { includeConfig 'conf/test_full.config' } } @@ -220,13 +230,10 @@ manifest { description = """Pipeline to fetch metadata and raw FastQ files from public databases""" mainScript = 'main.nf' nextflowVersion = '!>=23.04.0' - version = '1.10.1' + version = '1.11.0' doi = 'https://doi.org/10.5281/zenodo.5070524' } -// Load modules.config for DSL2 module specific options -includeConfig 'conf/modules.config' - // Function to ensure that resource requirements don't go beyond // a maximum limit def check_max(obj, type) { diff --git a/nf-test.config b/nf-test.config new file mode 100644 index 00000000..cb656b94 --- /dev/null +++ b/nf-test.config @@ -0,0 +1,16 @@ +config { + // location for all nf-tests + testsDir "." + + // nf-test directory including temporary files for each test + workDir "/tmp" + + // location of library folder that is added automatically to the classpath + libDir "lib/" + + // location of an optional nextflow.config file specific for executing tests + configFile "nextflow.config" + + // run all test with the defined docker profile from the main nextflow.config + profile "" +} diff --git a/subworkflows/local/nf_core_fetchngs_utils/main.nf b/subworkflows/local/nf_core_fetchngs_utils/main.nf new file mode 100644 index 00000000..509a4473 --- /dev/null +++ b/subworkflows/local/nf_core_fetchngs_utils/main.nf @@ -0,0 +1,313 @@ +// +// Subworkflow with functionality specific to the nf-core/fetchngs pipeline +// + +/* +======================================================================================== + IMPORT MODULES/SUBWORKFLOWS +======================================================================================== +*/ + +include { NEXTFLOW_PIPELINE_UTILS; getWorkflowVersion 
} from '../../nf-core/nextflowpipelineutils/main' +include { NF_VALIDATION_PLUGIN_UTILS } from '../../nf-core/nfvalidation_plugin_utils/main.nf' +include { + NFCORE_PIPELINE_UTILS; + workflowCitation; + nfCoreLogo; + dashedLine; + completionEmail; + completionSummary; + imNotification +} from '../../nf-core/nfcore_pipeline_utils' + +/* +======================================================================================== + SUBWORKFLOW TO INITIALISE PIPELINE +======================================================================================== +*/ + +workflow PIPELINE_INITIALISATION { + + main: + + // + // Print version and exit if required and dump pipeline parameters to JSON file + // + NEXTFLOW_PIPELINE_UTILS ( + params.version, + true, + params.outdir, + workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1 + ) + + // + // Validate parameters and generate parameter summary to stdout + // + def pre_help_text = nfCoreLogo(getWorkflowVersion()) + def post_help_text = '\n' + workflowCitation() + '\n' + dashedLine() + def String workflow_command = "nextflow run ${workflow.manifest.name} -profile --input ids.csv --outdir " + NF_VALIDATION_PLUGIN_UTILS ( + params.help, + workflow_command, + pre_help_text, + post_help_text, + params.validate_params, + "nextflow_schema.json" + ) + + // + // Check config provided to the pipeline + // + NFCORE_PIPELINE_UTILS () + + // + // Auto-detect input id type + // + ch_input = file(params.input) + def input_type = '' + if (isSraId(ch_input)) { + input_type = 'sra' + sraCheckENAMetadataFields() + } else if (isSynapseId(ch_input)) { + input_type = 'synapse' + } else { + error('Ids provided via --input not recognised please make sure they are either SRA / ENA / GEO / DDBJ or Synapse ids!') + } + + if (params.input_type != input_type) { + error("Ids auto-detected as ${input_type}. 
Please provide '--input_type ${input_type}' as a parameter to the pipeline!") + } + + // Read in ids from --input file + Channel + .from(ch_input) + .splitCsv(header:false, sep:'', strip:true) + .map { it[0] } + .unique() + .set { ch_ids } + + emit: + ids = ch_ids + summary_params = NF_VALIDATION_PLUGIN_UTILS.out.summary_params +} + +/* +======================================================================================== + SUBWORKFLOW FOR PIPELINE COMPLETION +======================================================================================== +*/ + +workflow PIPELINE_COMPLETION { + + take: + versions // channel: software tools versions + input_type // string: 'sra' or 'synapse' + email // string: email address + email_on_fail // string: email address sent on pipeline failure + hook_url // string: hook URL for notifications + summary_params // map: Groovy map of the parameters used in the pipeline + + main: + + // + // MODULE: Dump software versions for all tools used in the workflow + // + pipeline_version_info = Channel.of("""\"workflow\": + nextflow: ${workflow.nextflow.version} + ${workflow.manifest.name}: ${workflow.manifest.version} + """.stripIndent()) + + versions = versions.mix(pipeline_version_info) + versions.collectFile(name: 'fetchngs_mqc_versions.yml', storeDir: "${params.outdir}/pipeline_info") + + // + // Completion email and summary + // + workflow.onComplete { + if (email || email_on_fail) { + completionEmail(summary_params) + } + + completionSummary() + + if (hook_url) { + imNotification(summary_params) + } + + if (input_type == 'sra') { + sraCurateSamplesheetWarn() + } else if (input_type == 'synapse') { + synapseCurateSamplesheetWarn() + } + } +} + +/* +======================================================================================== + FUNCTIONS +======================================================================================== +*/ + +// +// Check if input ids are from the SRA +// +def isSraId(input) { + def is_sra = false + 
def total_ids = 0 + def no_match_ids = [] + def pattern = /^(((SR|ER|DR)[APRSX])|(SAM(N|EA|D))|(PRJ(NA|EB|DB))|(GS[EM]))(\d+)$/ + input.eachLine { line -> + total_ids += 1 + if (!(line =~ pattern)) { + no_match_ids << line + } + } + + def num_match = total_ids - no_match_ids.size() + if (num_match > 0) { + if (num_match == total_ids) { + is_sra = true + } else { + error("Mixture of ids provided via --input: ${no_match_ids.join(', ')}\nPlease provide either SRA / ENA / GEO / DDBJ or Synapse ids!") + } + } + return is_sra +} + +// +// Check if input ids are from the Synapse platform +// +def isSynapseId(input) { + def is_synapse = false + def total_ids = 0 + def no_match_ids = [] + def pattern = /^syn\d{8}$/ + input.eachLine { line -> + total_ids += 1 + if (!(line =~ pattern)) { + no_match_ids << line + } + } + + def num_match = total_ids - no_match_ids.size() + if (num_match > 0) { + if (num_match == total_ids) { + is_synapse = true + } else { + error("Mixture of ids provided via --input: ${no_match_ids.join(', ')}\nPlease provide either SRA / ENA / GEO / DDBJ or Synapse ids!") + } + } + return is_synapse +} + +// +// Check and validate parameters +// +def sraCheckENAMetadataFields() { + // Check minimal ENA fields are provided to download FastQ files + def valid_ena_metadata_fields = ['run_accession', 'experiment_accession', 'library_layout', 'fastq_ftp', 'fastq_md5'] + def ena_metadata_fields = params.ena_metadata_fields ? params.ena_metadata_fields.split(',').collect{ it.trim().toLowerCase() } : valid_ena_metadata_fields + if (!ena_metadata_fields.containsAll(valid_ena_metadata_fields)) { + error("Invalid option: '${params.ena_metadata_fields}'. 
Minimally required fields for '--ena_metadata_fields': '${valid_ena_metadata_fields.join(',')}'") + } +} + +// +// Print a warning after pipeline has completed +// +def sraCurateSamplesheetWarn() { + log.warn "=============================================================================\n" + + " Please double-check the samplesheet that has been auto-created by the pipeline.\n\n" + + " Public databases don't reliably hold information such as strandedness\n" + + " information, controls etc\n\n" + + " All of the sample metadata obtained from the ENA has been appended\n" + + " as additional columns to help you manually curate the samplesheet before\n" + + " running nf-core/other pipelines.\n" + + "===================================================================================" +} + +// +// Convert metadata obtained from the 'synapse show' command to a Groovy map +// +def synapseShowToMap(synapse_file) { + def meta = [:] + def category = '' + synapse_file.eachLine { line -> + def entries = [null, null] + if (!line.startsWith(' ') && !line.trim().isEmpty()) { + category = line.tokenize(':')[0] + } else { + entries = line.trim().tokenize('=') + } + meta["${category}|${entries[0]}"] = entries[1] + } + meta.id = meta['properties|id'] + meta.name = meta['properties|name'] + meta.md5 = meta['File|md5'] + return meta.findAll{ it.value != null } +} + +// +// Print a warning after pipeline has completed +// +def synapseCurateSamplesheetWarn() { + log.warn "=============================================================================\n" + + " Please double-check the samplesheet that has been auto-created by the pipeline.\n\n" + + " Where applicable, default values will be used for sample-specific metadata\n" + + " such as strandedness, controls etc as this information is not provided\n" + + " in a standardised manner when uploading data to Synapse.\n" + + "===================================================================================" +} + +// +// Obtain Sample ID from 
File Name +// +def synapseSampleNameFromFastQ(input_file, pattern) { + + def sampleids = "" + + def filePattern = pattern.toString() + int p = filePattern.lastIndexOf('/') + if( p != -1 ) + filePattern = filePattern.substring(p+1) + + input_file.each { + String fileName = input_file.getFileName().toString() + + String indexOfWildcards = filePattern.findIndexOf { it=='*' || it=='?' } + String indexOfBrackets = filePattern.findIndexOf { it=='{' || it=='[' } + if( indexOfWildcards==-1 && indexOfBrackets==-1 ) { + if( fileName == filePattern ) + return actual.getSimpleName() + throw new IllegalArgumentException("Not a valid file pair globbing pattern: pattern=$filePattern file=$fileName") + } + + int groupCount = 0 + for( int i=0; i w << email_html } + def output_tf = new File(output_d, "pipeline_report.txt") + output_tf.withWriter { w -> w << email_txt } +} + +// +// Print pipeline summary on completion +// +def completionSummary() { + Map colors = logColours() + if (workflow.success) { + if (workflow.stats.ignoredCount == 0) { + log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-" + } else { + log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" + } + } else { + log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-" + } +} + +// +// Construct and send a notification to a web server as JSON e.g. 
Microsoft Teams and Slack +// +def imNotification(summary_params) { + def hook_url = params.hook_url + + def summary = [:] + for (group in summary_params.keySet()) { + summary << summary_params[group] + } + + def misc_fields = [:] + misc_fields['start'] = workflow.start + misc_fields['complete'] = workflow.complete + misc_fields['scriptfile'] = workflow.scriptFile + misc_fields['scriptid'] = workflow.scriptId + if (workflow.repository) misc_fields['repository'] = workflow.repository + if (workflow.commitId) misc_fields['commitid'] = workflow.commitId + if (workflow.revision) misc_fields['revision'] = workflow.revision + misc_fields['nxf_version'] = workflow.nextflow.version + misc_fields['nxf_build'] = workflow.nextflow.build + misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp + + def msg_fields = [:] + msg_fields['version'] = NfcoreTemplate.version(workflow) + msg_fields['runName'] = workflow.runName + msg_fields['success'] = workflow.success + msg_fields['dateComplete'] = workflow.complete + msg_fields['duration'] = workflow.duration + msg_fields['exitStatus'] = workflow.exitStatus + msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None') + msg_fields['errorReport'] = (workflow.errorReport ?: 'None') + msg_fields['commandLine'] = workflow.commandLine.replaceFirst(/ +--hook_url +[^ ]+/, "") + msg_fields['projectDir'] = workflow.projectDir + msg_fields['summary'] = summary << misc_fields + + // Render the JSON template + def engine = new groovy.text.GStringTemplateEngine() + // Different JSON depending on the service provider + // Defaults to "Adaptive Cards" (https://adaptivecards.io), except Slack which has its own format + def json_path = hook_url.contains("hooks.slack.com") ? 
"slackreport.json" : "adaptivecard.json" + def hf = new File("${workflow.projectDir}/assets/${json_path}") + def json_template = engine.createTemplate(hf).make(msg_fields) + def json_message = json_template.toString() + + // POST + def post = new URL(hook_url).openConnection(); + post.setRequestMethod("POST") + post.setDoOutput(true) + post.setRequestProperty("Content-Type", "application/json") + post.getOutputStream().write(json_message.getBytes("UTF-8")); + def postRC = post.getResponseCode(); + if (! postRC.equals(200)) { + log.warn(post.getErrorStream().getText()); + } +} diff --git a/subworkflows/nf-core/nfcore_pipeline_utils/meta.yml b/subworkflows/nf-core/nfcore_pipeline_utils/meta.yml new file mode 100644 index 00000000..5721b7e3 --- /dev/null +++ b/subworkflows/nf-core/nfcore_pipeline_utils/meta.yml @@ -0,0 +1,17 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json +name: "NFCORE_PIPELINE_UTILS" +description: Subworkflow with utility functions specific to the nf-core pipeline template +keywords: + - utility + - pipeline + - initialise + - version +components: [] +input: [] +output: + - success: + type: boolean + description: | + Dummy output to indicate success +authors: + - "@adamrtalbot" diff --git a/subworkflows/nf-core/nfcore_pipeline_utils/tests/main.function.nf.test b/subworkflows/nf-core/nfcore_pipeline_utils/tests/main.function.nf.test new file mode 100644 index 00000000..265ece39 --- /dev/null +++ b/subworkflows/nf-core/nfcore_pipeline_utils/tests/main.function.nf.test @@ -0,0 +1,120 @@ + +nextflow_function { + + name "Test Functions" + script "../main.nf" + config "subworkflows/nf-core/nfcore_pipeline_utils/tests/nextflow.config" + + test("Test Function checkConfigProvided") { + + function "checkConfigProvided" + + when { + function { + """ + // define inputs of the function here. 
Example: + // input[0] = 1 + """ + } + } + + then { + assertAll ( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + + } + + test("Test Function workflowCitation") { + + function "workflowCitation" + + when { + function { + """ + // define inputs of the function here. Example: + // input[0] = 1 + """ + } + } + + then { + assertAll ( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + + } + + test("Test Function nfCoreLogo") { + + function "nfCoreLogo" + + when { + function { + """ + input[0] = "9.9.9" + """ + } + } + + then { + assertAll ( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + + } + + + + test("Test Function dashedLine") { + + function "dashedLine" + + when { + function { + """ + // define inputs of the function here. Example: + // input[0] = 1 + """ + } + } + + then { + assertAll ( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + + } + + + + test("Test Function logColours") { + + function "logColours" + + when { + function { + """ + // define inputs of the function here. 
Example: + // input[0] = 1 + """ + } + } + + then { + assertAll ( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + + } +} \ No newline at end of file diff --git a/subworkflows/nf-core/nfcore_pipeline_utils/tests/main.function.nf.test.snap b/subworkflows/nf-core/nfcore_pipeline_utils/tests/main.function.nf.test.snap new file mode 100644 index 00000000..3ea51f68 --- /dev/null +++ b/subworkflows/nf-core/nfcore_pipeline_utils/tests/main.function.nf.test.snap @@ -0,0 +1,78 @@ +{ + "Test Function logColours": { + "content": [ + { + "reset": "\u001b[0m", + "bold": "\u001b[1m", + "dim": "\u001b[2m", + "underlined": "\u001b[4m", + "blink": "\u001b[5m", + "reverse": "\u001b[7m", + "hidden": "\u001b[8m", + "black": "\u001b[0;30m", + "red": "\u001b[0;31m", + "green": "\u001b[0;32m", + "yellow": "\u001b[0;33m", + "blue": "\u001b[0;34m", + "purple": "\u001b[0;35m", + "cyan": "\u001b[0;36m", + "white": "\u001b[0;37m", + "bblack": "\u001b[1;30m", + "bred": "\u001b[1;31m", + "bgreen": "\u001b[1;32m", + "byellow": "\u001b[1;33m", + "bblue": "\u001b[1;34m", + "bpurple": "\u001b[1;35m", + "bcyan": "\u001b[1;36m", + "bwhite": "\u001b[1;37m", + "ublack": "\u001b[4;30m", + "ured": "\u001b[4;31m", + "ugreen": "\u001b[4;32m", + "uyellow": "\u001b[4;33m", + "ublue": "\u001b[4;34m", + "upurple": "\u001b[4;35m", + "ucyan": "\u001b[4;36m", + "uwhite": "\u001b[4;37m", + "iblack": "\u001b[0;90m", + "ired": "\u001b[0;91m", + "igreen": "\u001b[0;92m", + "iyellow": "\u001b[0;93m", + "iblue": "\u001b[0;94m", + "ipurple": "\u001b[0;95m", + "icyan": "\u001b[0;96m", + "iwhite": "\u001b[0;97m", + "biblack": "\u001b[1;90m", + "bired": "\u001b[1;91m", + "bigreen": "\u001b[1;92m", + "biyellow": "\u001b[1;93m", + "biblue": "\u001b[1;94m", + "bipurple": "\u001b[1;95m", + "bicyan": "\u001b[1;96m", + "biwhite": "\u001b[1;97m" + } + ], + "timestamp": "2023-10-16T14:27:09.843064" + }, + "Test Function checkConfigProvided": { + "content": null, + "timestamp": 
"2023-10-16T14:27:01.415849" + }, + "Test Function nfCoreLogo": { + "content": [ + "\n\n-\u001b[2m----------------------------------------------------\u001b[0m-\n \u001b[0;32m,--.\u001b[0;30m/\u001b[0;32m,-.\u001b[0m\n\u001b[0;34m ___ __ __ __ ___ \u001b[0;32m/,-._.--~'\u001b[0m\n\u001b[0;34m |\\ | |__ __ / ` / \\ |__) |__ \u001b[0;33m} {\u001b[0m\n\u001b[0;34m | \\| | \\__, \\__/ | \\ |___ \u001b[0;32m\\`-._,-`-,\u001b[0m\n \u001b[0;32m`._,._,'\u001b[0m\n\u001b[0;35m nextflow_workflow 9.9.9\u001b[0m\n-\u001b[2m----------------------------------------------------\u001b[0m-\n" + ], + "timestamp": "2023-10-16T14:27:51.566211" + }, + "Test Function workflowCitation": { + "content": [ + "If you use nextflow_workflow for your analysis please cite:\n\n* The pipeline\n https://doi.org/10.5281/zenodo.5070524\n\n* The nf-core framework\n https://doi.org/10.1038/s41587-020-0439-x\n\n* Software dependencies\n https://github.com/nextflow_workflow/blob/master/CITATIONS.md" + ], + "timestamp": "2023-10-16T14:27:03.505737" + }, + "Test Function dashedLine": { + "content": [ + "-\u001b[2m----------------------------------------------------\u001b[0m-" + ], + "timestamp": "2023-10-16T14:27:07.721916" + } +} \ No newline at end of file diff --git a/subworkflows/nf-core/nfcore_pipeline_utils/tests/main.workflow.nf.test b/subworkflows/nf-core/nfcore_pipeline_utils/tests/main.workflow.nf.test new file mode 100644 index 00000000..ad7c73a8 --- /dev/null +++ b/subworkflows/nf-core/nfcore_pipeline_utils/tests/main.workflow.nf.test @@ -0,0 +1,33 @@ +nextflow_workflow { + + name "Test Workflow NFCORE_PIPELINE_UTILS" + script "../main.nf" + config "subworkflows/nf-core/nfcore_pipeline_utils/tests/nextflow.config" + workflow "NFCORE_PIPELINE_UTILS" + tag "subworkflows" + tag "subworkflows_nfcore" + tag "nfcorepipelineutils" + tag "subworkflows/nfcorepipelineutils" + + test("Should run without failures") { + + when { + params { + // define parameters here. 
Example: + // outdir = "tests/results" + } + workflow { + """ + // define inputs of the workflow here. Example: + // input[0] = file("test-file.txt") + """ + } + } + + then { + assert workflow.success + } + + } + +} diff --git a/subworkflows/nf-core/nfcore_pipeline_utils/tests/nextflow.config b/subworkflows/nf-core/nfcore_pipeline_utils/tests/nextflow.config new file mode 100644 index 00000000..53574ffe --- /dev/null +++ b/subworkflows/nf-core/nfcore_pipeline_utils/tests/nextflow.config @@ -0,0 +1,9 @@ +manifest { + name = 'nextflow_workflow' + author = """nf-core""" + homePage = 'https://127.0.0.1' + description = """Dummy pipeline""" + nextflowVersion = '!>=23.04.0' + version = '9.9.9' + doi = 'https://doi.org/10.5281/zenodo.5070524' +} \ No newline at end of file diff --git a/subworkflows/nf-core/nfvalidation_plugin_utils/main.nf b/subworkflows/nf-core/nfvalidation_plugin_utils/main.nf new file mode 100644 index 00000000..22531719 --- /dev/null +++ b/subworkflows/nf-core/nfvalidation_plugin_utils/main.nf @@ -0,0 +1,62 @@ +// +// Subworkflow that uses the nf-validation plugin to render help text and parameter summary +// + +/* +======================================================================================== + IMPORT NF-VALIDATION PLUGIN +======================================================================================== +*/ + +include { paramsHelp; paramsSummaryLog; paramsSummaryMap; validateParameters } from 'plugin/nf-validation' + +/* +======================================================================================== + SUBWORKFLOW DEFINITION +======================================================================================== +*/ + +workflow NF_VALIDATION_PLUGIN_UTILS { + + take: + print_help // bool + workflow_command // string: default commmand used to run pipeline + pre_help_text // string: string to be printed before help text and summary log + post_help_text // string: string to be printed after help text and summary log + 
validate_params // bool: Validate parameters + schema_filename // path: JSON schema file, null to use default value + + main: + + log.debug "Using schema file: ${schema_filename}" + + // Default values for strings + pre_help_text = pre_help_text ?: '' + post_help_text = post_help_text ?: '' + workflow_command = workflow_command ?: '' + + // + // Print help message if needed + // + if (print_help) { + log.info pre_help_text + paramsHelp(workflow_command, parameters_schema: schema_filename) + post_help_text + System.exit(0) + } + + // + // Print parameter summary to stdout + // + log.info pre_help_text + paramsSummaryLog(workflow, parameters_schema: schema_filename) + post_help_text + + // + // Validate parameters relative to the parameter JSON schema + // + if (validate_params){ + validateParameters(parameters_schema: schema_filename) + } + + summary_params = paramsSummaryMap(workflow, parameters_schema: schema_filename) + + emit: + summary_params = summary_params +} diff --git a/subworkflows/nf-core/nfvalidation_plugin_utils/meta.yml b/subworkflows/nf-core/nfvalidation_plugin_utils/meta.yml new file mode 100644 index 00000000..6117b455 --- /dev/null +++ b/subworkflows/nf-core/nfvalidation_plugin_utils/meta.yml @@ -0,0 +1,49 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json +name: "nfvalidationpluginutils" +## TODO nf-core: Add a description of the subworkflow and list keywords +description: Sort SAM/BAM/CRAM file +keywords: + - sort + - bam + - sam + - cram +## TODO nf-core: Add a list of the modules and/or subworkflows used in the subworkflow +components: + - samtools/sort + - samtools/index +## TODO nf-core: List all of the channels used as input with a description and their structure +input: + - ch_bam: + type: file + description: | + The input channel containing the BAM/CRAM/SAM files + Structure: [ val(meta), path(bam) ] + pattern: "*.{bam/cram/sam}" +## TODO nf-core: List all of the 
channels used as output with a descriptions and their structure +output: + - bam: + type: file + description: | + Channel containing BAM files + Structure: [ val(meta), path(bam) ] + pattern: "*.bam" + - bai: + type: file + description: | + Channel containing indexed BAM (BAI) files + Structure: [ val(meta), path(bai) ] + pattern: "*.bai" + - csi: + type: file + description: | + Channel containing CSI files + Structure: [ val(meta), path(csi) ] + pattern: "*.csi" + - versions: + type: file + description: | + File containing software versions + Structure: [ path(versions.yml) ] + pattern: "versions.yml" +authors: + - "@adamrtalbot" diff --git a/subworkflows/nf-core/nfvalidation_plugin_utils/tests/main.nf.test b/subworkflows/nf-core/nfvalidation_plugin_utils/tests/main.nf.test new file mode 100644 index 00000000..6a0efd1e --- /dev/null +++ b/subworkflows/nf-core/nfvalidation_plugin_utils/tests/main.nf.test @@ -0,0 +1,199 @@ +nextflow_workflow { + + name "Test Workflow NF_VALIDATION_PLUGIN_UTILS" + script "../main.nf" + workflow "NF_VALIDATION_PLUGIN_UTILS" + tag "subworkflow" + tag "subworkflow_nfcore" + tag "nfvalidationpluginutils" + tag "subworkflows/nfvalidationpluginutils" + + test("Should run nothing") { + + when { + + params { + monochrome_logs = true + test_data = '' + } + + workflow { + """ + help = false + workflow_command = null + pre_help_text = null + post_help_text = null + validate_params = false + schema_filename = "$moduleTestDir/nextflow_schema.json" + + input[0] = help + input[1] = workflow_command + input[2] = pre_help_text + input[3] = post_help_text + input[4] = validate_params + input[5] = schema_filename + """ + } + } + + then { + assert workflow.success + } + + } + + test("Should run help") { + + + when { + + params { + monochrome_logs = true + test_data = '' + } + workflow { + """ + help = true + workflow_command = null + pre_help_text = null + post_help_text = null + validate_params = false + schema_filename = 
"$moduleTestDir/nextflow_schema.json" + + input[0] = help + input[1] = workflow_command + input[2] = pre_help_text + input[3] = post_help_text + input[4] = validate_params + input[5] = schema_filename + """ + } + } + + then { + assert workflow.success + assert workflow.exitStatus == 0 + assert workflow.stdout.any { it.contains('Input/output options') } + assert workflow.stdout.any { it.contains('--outdir') } + assert workflow.stdout.any { it.contains('--outdir') } + } + + } + + test("Should run help with command") { + + + when { + + params { + monochrome_logs = true + test_data = '' + } + workflow { + """ + help = true + workflow_command = "nextflow run noorg/doesntexist" + pre_help_text = null + post_help_text = null + validate_params = false + schema_filename = "$moduleTestDir/nextflow_schema.json" + + input[0] = help + input[1] = workflow_command + input[2] = pre_help_text + input[3] = post_help_text + input[4] = validate_params + input[5] = schema_filename + """ + } + } + + then { + assert workflow.success + assert workflow.exitStatus == 0 + assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } + assert workflow.stdout.any { it.contains('Input/output options') } + assert workflow.stdout.any { it.contains('--outdir') } + assert workflow.stdout.any { it.contains('--outdir') } + } + + } + + test("Should run help with extra text") { + + + when { + + params { + monochrome_logs = true + test_data = '' + } + workflow { + """ + help = true + workflow_command = "nextflow run noorg/doesntexist" + pre_help_text = "pre-help-text" + post_help_text = "post-help-text" + validate_params = false + schema_filename = "$moduleTestDir/nextflow_schema.json" + + input[0] = help + input[1] = workflow_command + input[2] = pre_help_text + input[3] = post_help_text + input[4] = validate_params + input[5] = schema_filename + """ + } + } + + then { + assert workflow.success + assert workflow.exitStatus == 0 + assert workflow.stdout.any { it.contains('pre-help-text') 
} + assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } + assert workflow.stdout.any { it.contains('Input/output options') } + assert workflow.stdout.any { it.contains('--outdir') } + assert workflow.stdout.any { it.contains('--outdir') } + assert workflow.stdout.any { it.contains('post-help-text') } + } + + } + + test("Should validate params") { + + + when { + + params { + monochrome_logs = true + test_data = '' + } + workflow { + """ + help = false + workflow_command = null + pre_help_text = null + post_help_text = null + validate_params = true + schema_filename = "$moduleTestDir/nextflow_schema.json" + + input[0] = help + input[1] = workflow_command + input[2] = pre_help_text + input[3] = post_help_text + input[4] = validate_params + input[5] = schema_filename + """ + } + } + + then { + assert workflow.failed + assert workflow.stdout.any { it.contains('ERROR ~ ERROR: Validation of pipeline parameters failed!') } + assert workflow.stdout.any { it.contains('The following invalid input values have been detected:') } + } + + } + +} diff --git a/subworkflows/nf-core/nfvalidation_plugin_utils/tests/main.nf.test.snap b/subworkflows/nf-core/nfvalidation_plugin_utils/tests/main.nf.test.snap new file mode 100644 index 00000000..ae33a930 --- /dev/null +++ b/subworkflows/nf-core/nfvalidation_plugin_utils/tests/main.nf.test.snap @@ -0,0 +1,26 @@ +{ + "Should run help": { + "content": [ + { + "stderr": [ + + ], + "errorReport": "", + "exitStatus": 0, + "failed": false, + "stdout": [ + + ], + "errorMessage": "", + "trace": { + "tasksFailed": 0, + "tasksCount": 0, + "tasksSucceeded": 0 + }, + "name": "workflow", + "success": true + } + ], + "timestamp": "2023-10-13T13:18:16.933251413" + } +} diff --git a/subworkflows/nf-core/nfvalidation_plugin_utils/tests/nextflow_schema.json b/subworkflows/nf-core/nfvalidation_plugin_utils/tests/nextflow_schema.json new file mode 100644 index 00000000..7626c1c9 --- /dev/null +++ 
b/subworkflows/nf-core/nfvalidation_plugin_utils/tests/nextflow_schema.json @@ -0,0 +1,96 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "https://raw.githubusercontent.com/./master/nextflow_schema.json", + "title": ". pipeline parameters", + "description": "", + "type": "object", + "definitions": { + "input_output_options": { + "title": "Input/output options", + "type": "object", + "fa_icon": "fas fa-terminal", + "description": "Define where the pipeline should find input data and save output data.", + "required": ["outdir"], + "properties": { + "validate_params": { + "type": "boolean", + "description": "Validate parameters?", + "default": true, + "hidden": true + }, + "outdir": { + "type": "string", + "format": "directory-path", + "description": "The output directory where the results will be saved. You have to use absolute paths to storage on Cloud infrastructure.", + "fa_icon": "fas fa-folder-open" + }, + "test_data_base": { + "type": "string", + "default": "https://raw.githubusercontent.com/nf-core/test-datasets/modules", + "description": "Base for test data directory", + "hidden": true + }, + "test_data": { + "type": "string", + "description": "Fake test data param", + "hidden": true + } + } + }, + "generic_options": { + "title": "Generic options", + "type": "object", + "fa_icon": "fas fa-file-import", + "description": "Less common options for the pipeline, typically set in a config file.", + "help_text": "These options are common to all nf-core pipelines and allow you to customise some of the core preferences for how the pipeline runs.\n\nTypically these options would be set in a Nextflow config file loaded for all pipeline runs, such as `~/.nextflow/config`.", + "properties": { + "help": { + "type": "boolean", + "description": "Display help text.", + "fa_icon": "fas fa-question-circle", + "hidden": true + }, + "version": { + "type": "boolean", + "description": "Display version and exit.", + "fa_icon": "fas fa-question-circle", + 
"hidden": true + }, + "logo": { + "type": "boolean", + "default": true, + "description": "Display nf-core logo in console output.", + "fa_icon": "fas fa-image", + "hidden": true + }, + "singularity_pull_docker_container": { + "type": "boolean", + "description": "Pull Singularity container from Docker?", + "hidden": true + }, + "publish_dir_mode": { + "type": "string", + "default": "copy", + "description": "Method used to save pipeline results to output directory.", + "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.", + "fa_icon": "fas fa-copy", + "enum": ["symlink", "rellink", "link", "copy", "copyNoFollow", "move"], + "hidden": true + }, + "monochrome_logs": { + "type": "boolean", + "description": "Use monochrome_logs", + "hidden": true + } + } + } + }, + "allOf": [ + { + "$ref": "#/definitions/input_output_options" + }, + { + "$ref": "#/definitions/generic_options" + } + ] +} diff --git a/tests/main.nf.test b/tests/main.nf.test new file mode 100644 index 00000000..ac05aea9 --- /dev/null +++ b/tests/main.nf.test @@ -0,0 +1,28 @@ +nextflow_pipeline { + + name "Test pipeline" + script "../main.nf" + tag "pipeline" + tag "pipeline_fetchngs" + + test("Run with profile test") { + + when { + params { + outdir = "results" + max_cpus = 2 + max_memory = '6.GB' + max_time = '6.h' + input = "$projectDir/tests/sra_ids_test.csv" + validationSchemaIgnoreParams = 'test_data_base,merge_samplesheet_ids,fastq_ftp_ids,test_data' + } + } + + then { + assertAll( + { assert workflow.success } + ) + } + } + +} diff --git a/tests/sra_ids_test.csv b/tests/sra_ids_test.csv new file mode 100644 index 00000000..d9a20c22 --- /dev/null +++ b/tests/sra_ids_test.csv @@ -0,0 +1,3 @@ +ERR1160846 +GSE214215 +SRR12848126 diff --git a/tests/tags.yml 
b/tests/tags.yml new file mode 100644 index 00000000..80e5fd75 --- /dev/null +++ b/tests/tags.yml @@ -0,0 +1,5 @@ +pipeline_fetchngs: + - "**.nf" + - "**.config" + - "**.nf.test" + - "**.json" diff --git a/workflows/sra/main.nf b/workflows/sra/main.nf index 6b2ce98b..ddac096d 100644 --- a/workflows/sra/main.nf +++ b/workflows/sra/main.nf @@ -1,36 +1,23 @@ -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - VALIDATE INPUTS -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -include { paramsSummaryMap } from 'plugin/nf-validation' - -def summary_params = paramsSummaryMap(workflow) - -WorkflowSra.initialise(params) - /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ IMPORT LOCAL MODULES/SUBWORKFLOWS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ +include { MULTIQC_MAPPINGS_CONFIG } from '../../modules/local/multiqc_mappings_config' +include { SRA_FASTQ_FTP } from '../../modules/local/sra_fastq_ftp' include { SRA_IDS_TO_RUNINFO } from '../../modules/local/sra_ids_to_runinfo' +include { SRA_MERGE_SAMPLESHEET } from '../../modules/local/sra_merge_samplesheet' include { SRA_RUNINFO_TO_FTP } from '../../modules/local/sra_runinfo_to_ftp' -include { SRA_FASTQ_FTP } from '../../modules/local/sra_fastq_ftp' include { SRA_TO_SAMPLESHEET } from '../../modules/local/sra_to_samplesheet' -include { SRA_MERGE_SAMPLESHEET } from '../../modules/local/sra_merge_samplesheet' -include { MULTIQC_MAPPINGS_CONFIG } from '../../modules/local/multiqc_mappings_config' /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - IMPORT NF-CORE MODULES/SUBWORKFLOWS + IMPORT NF-CORE SUBWORKFLOWS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../../modules/nf-core/custom/dumpsoftwareversions/main' 
-include { FASTQ_DOWNLOAD_PREFETCH_FASTERQDUMP_SRATOOLS } from '../../subworkflows/nf-core/fastq_download_prefetch_fasterqdump_sratools/main' +include { FASTQ_DOWNLOAD_PREFETCH_FASTERQDUMP_SRATOOLS } from '../../subworkflows/nf-core/fastq_download_prefetch_fasterqdump_sratools' /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -67,16 +54,15 @@ workflow SRA { .out .tsv .splitCsv(header:true, sep:'\t') - .map { - meta -> - def meta_clone = meta.clone() - meta_clone.single_end = meta_clone.single_end.toBoolean() - return meta_clone + .map{ meta -> + def meta_clone = meta.clone() + meta_clone.single_end = meta_clone.single_end.toBoolean() + return meta_clone } .unique() .set { ch_sra_metadata } - ch_versions = ch_versions.mix(SRA_RUNINFO_TO_FTP.out.versions.first()) + fastq_files = Channel.empty() if (!params.skip_fastq_download) { ch_sra_metadata @@ -107,16 +93,17 @@ workflow SRA { ) ch_versions = ch_versions.mix(FASTQ_DOWNLOAD_PREFETCH_FASTERQDUMP_SRATOOLS.out.versions.first()) - SRA_FASTQ_FTP - .out - .fastq - .mix(FASTQ_DOWNLOAD_PREFETCH_FASTERQDUMP_SRATOOLS.out.reads) + // Isolate FASTQ channel which will be added to emit block + fastq_files + .mix(SRA_FASTQ_FTP.out.fastq, FASTQ_DOWNLOAD_PREFETCH_FASTERQDUMP_SRATOOLS.out.reads) .map { meta, fastq -> def reads = fastq instanceof List ? fastq.flatten() : [ fastq ] def meta_clone = meta.clone() + meta_clone.fastq_1 = reads[0] ? "${params.outdir}/fastq/${reads[0].getName()}" : '' meta_clone.fastq_2 = reads[1] && !meta.single_end ? 
"${params.outdir}/fastq/${reads[1].getName()}" : '' + return meta_clone } .set { ch_sra_metadata } @@ -144,33 +131,21 @@ workflow SRA { // // MODULE: Create a MutiQC config file with sample name mappings // + ch_sample_mappings_yml = Channel.empty() if (params.sample_mapping_fields) { MULTIQC_MAPPINGS_CONFIG ( SRA_MERGE_SAMPLESHEET.out.mappings ) ch_versions = ch_versions.mix(MULTIQC_MAPPINGS_CONFIG.out.versions) + ch_sample_mappings_yml = MULTIQC_MAPPINGS_CONFIG.out.yml } - // - // MODULE: Dump software versions for all tools used in the workflow - // - CUSTOM_DUMPSOFTWAREVERSIONS ( - ch_versions.unique().collectFile(name: 'collated_versions.yml') - ) -} - -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - COMPLETION EMAIL AND SUMMARY -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -workflow.onComplete { - if (params.email || params.email_on_fail) { - NfcoreTemplate.email(workflow, params, summary_params, projectDir, log) - } - NfcoreTemplate.summary(workflow, params, log) - WorkflowSra.curateSamplesheetWarn(log) + emit: + fastq = fastq_files + samplesheet = SRA_MERGE_SAMPLESHEET.out.samplesheet + mappings = SRA_MERGE_SAMPLESHEET.out.mappings + sample_mappings = ch_sample_mappings_yml + versions = ch_versions.unique() } /* diff --git a/workflows/sra/nextflow.config b/workflows/sra/nextflow.config new file mode 100644 index 00000000..6b66fcd5 --- /dev/null +++ b/workflows/sra/nextflow.config @@ -0,0 +1,8 @@ +includeConfig "../../modules/local/multiqc_mappings_config/nextflow.config" +includeConfig "../../modules/local/sra_fastq_ftp/nextflow.config" +includeConfig "../../modules/local/sra_ids_to_runinfo/nextflow.config" +includeConfig "../../modules/local/sra_merge_samplesheet/nextflow.config" +includeConfig "../../modules/local/sra_runinfo_to_ftp/nextflow.config" +includeConfig "../../modules/local/sra_to_samplesheet/nextflow.config" +includeConfig 
"../../modules/nf-core/sratools/prefetch/nextflow.config" +includeConfig "../../subworkflows/nf-core/fastq_download_prefetch_fasterqdump_sratools/nextflow.config" diff --git a/workflows/sra/tests/main.nf.test b/workflows/sra/tests/main.nf.test new file mode 100644 index 00000000..ef5b7c9d --- /dev/null +++ b/workflows/sra/tests/main.nf.test @@ -0,0 +1,36 @@ +nextflow_workflow { + + name "Test workflow: sra/main.nf" + script "../main.nf" + workflow "SRA" + tag "workflows" + tag "workflows_sra" + tag "multiqc_mappings_config" + tag "sra_fastq_ftp" + tag "sra_ids_to_runinfo" + tag "sra_merge_samplesheet" + tag "sra_runinfo_to_ftp" + tag "sra_to_samplesheet" + tag "sra_default_parameters" + + test("Parameters: default") { + + when { + workflow { + """ + input[0] = Channel.from('ERR1160846', 'GSE214215', 'SRR12848126') + """ + } + params { + outdir = "results" + } + } + + then { + assertAll( + { assert workflow.success }, + { assert snapshot(workflow.out).match() } + ) + } + } +} diff --git a/workflows/sra/tests/main.nf.test.snap b/workflows/sra/tests/main.nf.test.snap new file mode 100644 index 00000000..885b7d6a --- /dev/null +++ b/workflows/sra/tests/main.nf.test.snap @@ -0,0 +1,55 @@ +{ + "Parameters: default": { + "content": [ + { + "0": [ + + ], + "1": [ + "samplesheet.csv:md5,eda595eca3cb5ed450641565c38390c2" + ], + "2": [ + "id_mappings.csv:md5,3e70c965568c59f8c8fbb8e99e7a8b79" + ], + "3": [ + [ + "multiqc_config.yml:md5,6d4c3e5137704358474330207f5f2b5c", + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9" + ] + ], + "4": [ + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9", + "versions.yml:md5,45e4a369df2d3af023b8622aa3f6d5a8", + "versions.yml:md5,9b17045ca8bdc272cb3f9d349a81d206", + "versions.yml:md5,b21338b74b9a8fe0c7a114f7686c07cd", + "versions.yml:md5,b52279f7d6b891a6523d9321f3f85b47", + "versions.yml:md5,e977678bd13a54fc9c31addf135be15e" + ], + "fastq": [ + + ], + "mappings": [ + "id_mappings.csv:md5,3e70c965568c59f8c8fbb8e99e7a8b79" + ], + 
"sample_mappings": [ + [ + "multiqc_config.yml:md5,6d4c3e5137704358474330207f5f2b5c", + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9" + ] + ], + "samplesheet": [ + "samplesheet.csv:md5,eda595eca3cb5ed450641565c38390c2" + ], + "versions": [ + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9", + "versions.yml:md5,45e4a369df2d3af023b8622aa3f6d5a8", + "versions.yml:md5,9b17045ca8bdc272cb3f9d349a81d206", + "versions.yml:md5,b21338b74b9a8fe0c7a114f7686c07cd", + "versions.yml:md5,b52279f7d6b891a6523d9321f3f85b47", + "versions.yml:md5,e977678bd13a54fc9c31addf135be15e" + ] + } + ], + "timestamp": "2023-10-18T09:49:19.020482551" + } +} \ No newline at end of file diff --git a/workflows/sra/tests/sra_custom_ena_metadata_fields.nf.test b/workflows/sra/tests/sra_custom_ena_metadata_fields.nf.test new file mode 100644 index 00000000..62cabb7b --- /dev/null +++ b/workflows/sra/tests/sra_custom_ena_metadata_fields.nf.test @@ -0,0 +1,39 @@ +nextflow_workflow { + + name "Test workflow: sra/main.nf" + script "../main.nf" + workflow "SRA" + tag "workflows" + tag "workflows_sra" + tag "multiqc_mappings_config" + tag "sra_fastq_ftp" + tag "sra_ids_to_runinfo" + tag "sra_merge_samplesheet" + tag "sra_runinfo_to_ftp" + tag "sra_to_samplesheet" + tag "sra_custom_ena_metadata_fields" + + test("Parameters: --nf_core_pipeline rnaseq --ena_metadata_fields ... 
--sample_mapping_fields ...") { + + when { + workflow { + """ + input[0] = Channel.from('ERR1160846', 'GSE214215', 'SRR12848126') + """ + } + params { + outdir = "results" + nf_core_pipeline = "rnaseq" + ena_metadata_fields = "run_accession,experiment_accession,library_layout,fastq_ftp,fastq_md5" + sample_mapping_fields = "run_accession,library_layout" + } + } + + then { + assertAll( + { assert workflow.success }, + { assert snapshot(workflow.out).match() } + ) + } + } +} diff --git a/workflows/sra/tests/sra_custom_ena_metadata_fields.nf.test.snap b/workflows/sra/tests/sra_custom_ena_metadata_fields.nf.test.snap new file mode 100644 index 00000000..45e007d0 --- /dev/null +++ b/workflows/sra/tests/sra_custom_ena_metadata_fields.nf.test.snap @@ -0,0 +1,55 @@ +{ + "Parameters: --nf_core_pipeline rnaseq --ena_metadata_fields ... --sample_mapping_fields ...": { + "content": [ + { + "0": [ + + ], + "1": [ + "samplesheet.csv:md5,a9e9da506288c364af14d46b86dafeb1" + ], + "2": [ + "id_mappings.csv:md5,8ed2bd72d432eff4a8ed7d909af6f60c" + ], + "3": [ + [ + "multiqc_config.yml:md5,3bc981f0de28023083cdf13691d249d5", + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9" + ] + ], + "4": [ + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9", + "versions.yml:md5,45e4a369df2d3af023b8622aa3f6d5a8", + "versions.yml:md5,9b17045ca8bdc272cb3f9d349a81d206", + "versions.yml:md5,b21338b74b9a8fe0c7a114f7686c07cd", + "versions.yml:md5,b52279f7d6b891a6523d9321f3f85b47", + "versions.yml:md5,e977678bd13a54fc9c31addf135be15e" + ], + "fastq": [ + + ], + "mappings": [ + "id_mappings.csv:md5,8ed2bd72d432eff4a8ed7d909af6f60c" + ], + "sample_mappings": [ + [ + "multiqc_config.yml:md5,3bc981f0de28023083cdf13691d249d5", + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9" + ] + ], + "samplesheet": [ + "samplesheet.csv:md5,a9e9da506288c364af14d46b86dafeb1" + ], + "versions": [ + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9", + "versions.yml:md5,45e4a369df2d3af023b8622aa3f6d5a8", + 
"versions.yml:md5,9b17045ca8bdc272cb3f9d349a81d206", + "versions.yml:md5,b21338b74b9a8fe0c7a114f7686c07cd", + "versions.yml:md5,b52279f7d6b891a6523d9321f3f85b47", + "versions.yml:md5,e977678bd13a54fc9c31addf135be15e" + ] + } + ], + "timestamp": "2023-10-18T09:36:57.154883885" + } +} \ No newline at end of file diff --git a/workflows/sra/tests/sra_force_sratools_download.nf.test b/workflows/sra/tests/sra_force_sratools_download.nf.test new file mode 100644 index 00000000..df751a6b --- /dev/null +++ b/workflows/sra/tests/sra_force_sratools_download.nf.test @@ -0,0 +1,37 @@ +nextflow_workflow { + + name "Test workflow: sra/main.nf" + script "../main.nf" + workflow "SRA" + tag "workflows" + tag "workflows_sra" + tag "multiqc_mappings_config" + tag "sra_fastq_ftp" + tag "sra_ids_to_runinfo" + tag "sra_merge_samplesheet" + tag "sra_runinfo_to_ftp" + tag "sra_to_samplesheet" + tag "sra_force_sratools_download" + + test("Parameters: --force_sratools_download") { + + when { + workflow { + """ + input[0] = Channel.from('ERR1160846', 'GSE214215', 'SRR12848126') + """ + } + params { + outdir = "results" + force_sratools_download = true + } + } + + then { + assertAll( + { assert workflow.success }, + { assert snapshot(workflow.out).match() } + ) + } + } +} diff --git a/workflows/sra/tests/sra_force_sratools_download.nf.test.snap b/workflows/sra/tests/sra_force_sratools_download.nf.test.snap new file mode 100644 index 00000000..f445296a --- /dev/null +++ b/workflows/sra/tests/sra_force_sratools_download.nf.test.snap @@ -0,0 +1,53 @@ +{ + "Parameters: --force_sratools_download": { + "content": [ + { + "0": [ + + ], + "1": [ + "samplesheet.csv:md5,7e6a65057fe000f562132ae9e608e87b" + ], + "2": [ + "id_mappings.csv:md5,3e70c965568c59f8c8fbb8e99e7a8b79" + ], + "3": [ + [ + "multiqc_config.yml:md5,6d4c3e5137704358474330207f5f2b5c", + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9" + ] + ], + "4": [ + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9", + 
"versions.yml:md5,45e4a369df2d3af023b8622aa3f6d5a8", + "versions.yml:md5,9b17045ca8bdc272cb3f9d349a81d206", + "versions.yml:md5,b21338b74b9a8fe0c7a114f7686c07cd", + "versions.yml:md5,b52279f7d6b891a6523d9321f3f85b47" + ], + "fastq": [ + + ], + "mappings": [ + "id_mappings.csv:md5,3e70c965568c59f8c8fbb8e99e7a8b79" + ], + "sample_mappings": [ + [ + "multiqc_config.yml:md5,6d4c3e5137704358474330207f5f2b5c", + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9" + ] + ], + "samplesheet": [ + "samplesheet.csv:md5,7e6a65057fe000f562132ae9e608e87b" + ], + "versions": [ + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9", + "versions.yml:md5,45e4a369df2d3af023b8622aa3f6d5a8", + "versions.yml:md5,9b17045ca8bdc272cb3f9d349a81d206", + "versions.yml:md5,b21338b74b9a8fe0c7a114f7686c07cd", + "versions.yml:md5,b52279f7d6b891a6523d9321f3f85b47" + ] + } + ], + "timestamp": "2023-10-18T09:43:16.468494798" + } +} \ No newline at end of file diff --git a/workflows/sra/tests/sra_nf_core_pipeline_atacseq.nf.test b/workflows/sra/tests/sra_nf_core_pipeline_atacseq.nf.test new file mode 100644 index 00000000..f4837f76 --- /dev/null +++ b/workflows/sra/tests/sra_nf_core_pipeline_atacseq.nf.test @@ -0,0 +1,37 @@ +nextflow_workflow { + + name "Test workflow: sra/main.nf" + script "../main.nf" + workflow "SRA" + tag "workflows" + tag "workflows_sra" + tag "multiqc_mappings_config" + tag "sra_fastq_ftp" + tag "sra_ids_to_runinfo" + tag "sra_merge_samplesheet" + tag "sra_runinfo_to_ftp" + tag "sra_to_samplesheet" + tag "sra_nf_core_pipeline_atacseq" + + test("Parameters: --nf_core_pipeline atacseq") { + + when { + workflow { + """ + input[0] = Channel.from('ERR1160846', 'GSE214215', 'SRR12848126') + """ + } + params { + outdir = "results" + nf_core_pipeline = "atacseq" + } + } + + then { + assertAll( + { assert workflow.success }, + { assert snapshot(workflow.out).match() } + ) + } + } +} diff --git a/workflows/sra/tests/sra_nf_core_pipeline_atacseq.nf.test.snap 
b/workflows/sra/tests/sra_nf_core_pipeline_atacseq.nf.test.snap new file mode 100644 index 00000000..3e9e7f31 --- /dev/null +++ b/workflows/sra/tests/sra_nf_core_pipeline_atacseq.nf.test.snap @@ -0,0 +1,55 @@ +{ + "Parameters: --nf_core_pipeline atacseq": { + "content": [ + { + "0": [ + + ], + "1": [ + "samplesheet.csv:md5,35292d2bea78b087d75c3333f6319b6b" + ], + "2": [ + "id_mappings.csv:md5,3e70c965568c59f8c8fbb8e99e7a8b79" + ], + "3": [ + [ + "multiqc_config.yml:md5,6d4c3e5137704358474330207f5f2b5c", + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9" + ] + ], + "4": [ + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9", + "versions.yml:md5,45e4a369df2d3af023b8622aa3f6d5a8", + "versions.yml:md5,9b17045ca8bdc272cb3f9d349a81d206", + "versions.yml:md5,b21338b74b9a8fe0c7a114f7686c07cd", + "versions.yml:md5,b52279f7d6b891a6523d9321f3f85b47", + "versions.yml:md5,e977678bd13a54fc9c31addf135be15e" + ], + "fastq": [ + + ], + "mappings": [ + "id_mappings.csv:md5,3e70c965568c59f8c8fbb8e99e7a8b79" + ], + "sample_mappings": [ + [ + "multiqc_config.yml:md5,6d4c3e5137704358474330207f5f2b5c", + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9" + ] + ], + "samplesheet": [ + "samplesheet.csv:md5,35292d2bea78b087d75c3333f6319b6b" + ], + "versions": [ + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9", + "versions.yml:md5,45e4a369df2d3af023b8622aa3f6d5a8", + "versions.yml:md5,9b17045ca8bdc272cb3f9d349a81d206", + "versions.yml:md5,b21338b74b9a8fe0c7a114f7686c07cd", + "versions.yml:md5,b52279f7d6b891a6523d9321f3f85b47", + "versions.yml:md5,e977678bd13a54fc9c31addf135be15e" + ] + } + ], + "timestamp": "2023-10-18T09:47:56.240260295" + } +} \ No newline at end of file diff --git a/workflows/sra/tests/sra_nf_core_pipeline_rnaseq.nf.test b/workflows/sra/tests/sra_nf_core_pipeline_rnaseq.nf.test new file mode 100644 index 00000000..871207ab --- /dev/null +++ b/workflows/sra/tests/sra_nf_core_pipeline_rnaseq.nf.test @@ -0,0 +1,37 @@ +nextflow_workflow { + + name "Test workflow: 
sra/main.nf" + script "../main.nf" + workflow "SRA" + tag "workflows" + tag "workflows_sra" + tag "multiqc_mappings_config" + tag "sra_fastq_ftp" + tag "sra_ids_to_runinfo" + tag "sra_merge_samplesheet" + tag "sra_runinfo_to_ftp" + tag "sra_to_samplesheet" + tag "sra_nf_core_pipeline_rnaseq" + + test("Parameters: --nf_core_pipeline rnaseq") { + + when { + workflow { + """ + input[0] = Channel.from('ERR1160846', 'GSE214215', 'SRR12848126') + """ + } + params { + outdir = "results" + nf_core_pipeline = "rnaseq" + } + } + + then { + assertAll( + { assert workflow.success }, + { assert snapshot(workflow.out).match() } + ) + } + } +} diff --git a/workflows/sra/tests/sra_nf_core_pipeline_rnaseq.nf.test.snap b/workflows/sra/tests/sra_nf_core_pipeline_rnaseq.nf.test.snap new file mode 100644 index 00000000..8805d3ac --- /dev/null +++ b/workflows/sra/tests/sra_nf_core_pipeline_rnaseq.nf.test.snap @@ -0,0 +1,55 @@ +{ + "Parameters: --nf_core_pipeline rnaseq": { + "content": [ + { + "0": [ + + ], + "1": [ + "samplesheet.csv:md5,b7ac0ff84f5031daaa9405c736a37125" + ], + "2": [ + "id_mappings.csv:md5,3e70c965568c59f8c8fbb8e99e7a8b79" + ], + "3": [ + [ + "multiqc_config.yml:md5,6d4c3e5137704358474330207f5f2b5c", + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9" + ] + ], + "4": [ + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9", + "versions.yml:md5,45e4a369df2d3af023b8622aa3f6d5a8", + "versions.yml:md5,9b17045ca8bdc272cb3f9d349a81d206", + "versions.yml:md5,b21338b74b9a8fe0c7a114f7686c07cd", + "versions.yml:md5,b52279f7d6b891a6523d9321f3f85b47", + "versions.yml:md5,e977678bd13a54fc9c31addf135be15e" + ], + "fastq": [ + + ], + "mappings": [ + "id_mappings.csv:md5,3e70c965568c59f8c8fbb8e99e7a8b79" + ], + "sample_mappings": [ + [ + "multiqc_config.yml:md5,6d4c3e5137704358474330207f5f2b5c", + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9" + ] + ], + "samplesheet": [ + "samplesheet.csv:md5,b7ac0ff84f5031daaa9405c736a37125" + ], + "versions": [ + 
"versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9", + "versions.yml:md5,45e4a369df2d3af023b8622aa3f6d5a8", + "versions.yml:md5,9b17045ca8bdc272cb3f9d349a81d206", + "versions.yml:md5,b21338b74b9a8fe0c7a114f7686c07cd", + "versions.yml:md5,b52279f7d6b891a6523d9321f3f85b47", + "versions.yml:md5,e977678bd13a54fc9c31addf135be15e" + ] + } + ], + "timestamp": "2023-10-18T09:40:15.590249346" + } +} \ No newline at end of file diff --git a/workflows/sra/tests/sra_nf_core_pipeline_taxprofiler.nf.test b/workflows/sra/tests/sra_nf_core_pipeline_taxprofiler.nf.test new file mode 100644 index 00000000..995301dd --- /dev/null +++ b/workflows/sra/tests/sra_nf_core_pipeline_taxprofiler.nf.test @@ -0,0 +1,37 @@ +nextflow_workflow { + + name "Test workflow: sra/main.nf" + script "../main.nf" + workflow "SRA" + tag "workflows" + tag "workflows_sra" + tag "multiqc_mappings_config" + tag "sra_fastq_ftp" + tag "sra_ids_to_runinfo" + tag "sra_merge_samplesheet" + tag "sra_runinfo_to_ftp" + tag "sra_to_samplesheet" + tag "sra_nf_core_pipeline_taxprofiler" + + test("Parameters: --nf_core_pipeline taxprofiler") { + + when { + workflow { + """ + input[0] = Channel.from('ERR1160846', 'GSE214215', 'SRR12848126') + """ + } + params { + outdir = "results" + nf_core_pipeline = "taxprofiler" + } + } + + then { + assertAll( + { assert workflow.success }, + { assert snapshot(workflow.out).match() } + ) + } + } +} diff --git a/workflows/sra/tests/sra_nf_core_pipeline_taxprofiler.nf.test.snap b/workflows/sra/tests/sra_nf_core_pipeline_taxprofiler.nf.test.snap new file mode 100644 index 00000000..0853eef8 --- /dev/null +++ b/workflows/sra/tests/sra_nf_core_pipeline_taxprofiler.nf.test.snap @@ -0,0 +1,55 @@ +{ + "Parameters: --nf_core_pipeline taxprofiler": { + "content": [ + { + "0": [ + + ], + "1": [ + "samplesheet.csv:md5,296836917bbdf34bda106a313424fa95" + ], + "2": [ + "id_mappings.csv:md5,3e70c965568c59f8c8fbb8e99e7a8b79" + ], + "3": [ + [ + 
"multiqc_config.yml:md5,6d4c3e5137704358474330207f5f2b5c", + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9" + ] + ], + "4": [ + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9", + "versions.yml:md5,45e4a369df2d3af023b8622aa3f6d5a8", + "versions.yml:md5,9b17045ca8bdc272cb3f9d349a81d206", + "versions.yml:md5,b21338b74b9a8fe0c7a114f7686c07cd", + "versions.yml:md5,b52279f7d6b891a6523d9321f3f85b47", + "versions.yml:md5,e977678bd13a54fc9c31addf135be15e" + ], + "fastq": [ + + ], + "mappings": [ + "id_mappings.csv:md5,3e70c965568c59f8c8fbb8e99e7a8b79" + ], + "sample_mappings": [ + [ + "multiqc_config.yml:md5,6d4c3e5137704358474330207f5f2b5c", + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9" + ] + ], + "samplesheet": [ + "samplesheet.csv:md5,296836917bbdf34bda106a313424fa95" + ], + "versions": [ + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9", + "versions.yml:md5,45e4a369df2d3af023b8622aa3f6d5a8", + "versions.yml:md5,9b17045ca8bdc272cb3f9d349a81d206", + "versions.yml:md5,b21338b74b9a8fe0c7a114f7686c07cd", + "versions.yml:md5,b52279f7d6b891a6523d9321f3f85b47", + "versions.yml:md5,e977678bd13a54fc9c31addf135be15e" + ] + } + ], + "timestamp": "2023-10-18T09:49:51.04235046" + } +} \ No newline at end of file diff --git a/workflows/sra/tests/sra_nf_core_pipeline_viralrecon.nf.test b/workflows/sra/tests/sra_nf_core_pipeline_viralrecon.nf.test new file mode 100644 index 00000000..4bf6a3f0 --- /dev/null +++ b/workflows/sra/tests/sra_nf_core_pipeline_viralrecon.nf.test @@ -0,0 +1,37 @@ +nextflow_workflow { + + name "Test workflow: sra/main.nf" + script "../main.nf" + workflow "SRA" + tag "workflows" + tag "workflows_sra" + tag "multiqc_mappings_config" + tag "sra_fastq_ftp" + tag "sra_ids_to_runinfo" + tag "sra_merge_samplesheet" + tag "sra_runinfo_to_ftp" + tag "sra_to_samplesheet" + tag "sra_nf_core_pipeline_viralrecon" + + test("Parameters: --nf_core_pipeline viralrecon") { + + when { + workflow { + """ + input[0] = Channel.from('ERR1160846', 'GSE214215', 
'SRR12848126') + """ + } + params { + outdir = "results" + nf_core_pipeline = "viralrecon" + } + } + + then { + assertAll( + { assert workflow.success }, + { assert snapshot(workflow.out).match() } + ) + } + } +} diff --git a/workflows/sra/tests/sra_nf_core_pipeline_viralrecon.nf.test.snap b/workflows/sra/tests/sra_nf_core_pipeline_viralrecon.nf.test.snap new file mode 100644 index 00000000..e364b3a9 --- /dev/null +++ b/workflows/sra/tests/sra_nf_core_pipeline_viralrecon.nf.test.snap @@ -0,0 +1,55 @@ +{ + "Parameters: --nf_core_pipeline viralrecon": { + "content": [ + { + "0": [ + + ], + "1": [ + "samplesheet.csv:md5,eda595eca3cb5ed450641565c38390c2" + ], + "2": [ + "id_mappings.csv:md5,3e70c965568c59f8c8fbb8e99e7a8b79" + ], + "3": [ + [ + "multiqc_config.yml:md5,6d4c3e5137704358474330207f5f2b5c", + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9" + ] + ], + "4": [ + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9", + "versions.yml:md5,45e4a369df2d3af023b8622aa3f6d5a8", + "versions.yml:md5,9b17045ca8bdc272cb3f9d349a81d206", + "versions.yml:md5,b21338b74b9a8fe0c7a114f7686c07cd", + "versions.yml:md5,b52279f7d6b891a6523d9321f3f85b47", + "versions.yml:md5,e977678bd13a54fc9c31addf135be15e" + ], + "fastq": [ + + ], + "mappings": [ + "id_mappings.csv:md5,3e70c965568c59f8c8fbb8e99e7a8b79" + ], + "sample_mappings": [ + [ + "multiqc_config.yml:md5,6d4c3e5137704358474330207f5f2b5c", + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9" + ] + ], + "samplesheet": [ + "samplesheet.csv:md5,eda595eca3cb5ed450641565c38390c2" + ], + "versions": [ + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9", + "versions.yml:md5,45e4a369df2d3af023b8622aa3f6d5a8", + "versions.yml:md5,9b17045ca8bdc272cb3f9d349a81d206", + "versions.yml:md5,b21338b74b9a8fe0c7a114f7686c07cd", + "versions.yml:md5,b52279f7d6b891a6523d9321f3f85b47", + "versions.yml:md5,e977678bd13a54fc9c31addf135be15e" + ] + } + ], + "timestamp": "2023-10-18T09:48:46.587502263" + } +} \ No newline at end of file diff --git 
a/workflows/sra/tests/sra_skip_fastq_download.nf.test b/workflows/sra/tests/sra_skip_fastq_download.nf.test new file mode 100644 index 00000000..705facbb --- /dev/null +++ b/workflows/sra/tests/sra_skip_fastq_download.nf.test @@ -0,0 +1,37 @@ +nextflow_workflow { + + name "Test workflow: sra/main.nf" + script "../main.nf" + workflow "SRA" + tag "workflows" + tag "workflows_sra" + tag "multiqc_mappings_config" + tag "sra_fastq_ftp" + tag "sra_ids_to_runinfo" + tag "sra_merge_samplesheet" + tag "sra_runinfo_to_ftp" + tag "sra_to_samplesheet" + tag "sra_skip_fastq_download" + + test("Parameters: --skip_fastq_download") { + + when { + workflow { + """ + input[0] = Channel.from('ERR1160846', 'GSE214215', 'SRR12848126') + """ + } + params { + outdir = "results" + skip_fastq_download = true + } + } + + then { + assertAll( + { assert workflow.success }, + { assert snapshot(workflow.out).match() } + ) + } + } +} diff --git a/workflows/sra/tests/sra_skip_fastq_download.nf.test.snap b/workflows/sra/tests/sra_skip_fastq_download.nf.test.snap new file mode 100644 index 00000000..cdb0965c --- /dev/null +++ b/workflows/sra/tests/sra_skip_fastq_download.nf.test.snap @@ -0,0 +1,51 @@ +{ + "Parameters: --skip_fastq_download": { + "content": [ + { + "0": [ + + ], + "1": [ + "samplesheet.csv:md5,0a9b99509bcc64ca245408b6bb634f15" + ], + "2": [ + "id_mappings.csv:md5,3e70c965568c59f8c8fbb8e99e7a8b79" + ], + "3": [ + [ + "multiqc_config.yml:md5,6d4c3e5137704358474330207f5f2b5c", + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9" + ] + ], + "4": [ + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9", + "versions.yml:md5,9b17045ca8bdc272cb3f9d349a81d206", + "versions.yml:md5,b21338b74b9a8fe0c7a114f7686c07cd", + "versions.yml:md5,b52279f7d6b891a6523d9321f3f85b47" + ], + "fastq": [ + + ], + "mappings": [ + "id_mappings.csv:md5,3e70c965568c59f8c8fbb8e99e7a8b79" + ], + "sample_mappings": [ + [ + "multiqc_config.yml:md5,6d4c3e5137704358474330207f5f2b5c", + 
"versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9" + ] + ], + "samplesheet": [ + "samplesheet.csv:md5,0a9b99509bcc64ca245408b6bb634f15" + ], + "versions": [ + "versions.yml:md5,1496d1cbc9041e07ab8a0c25f0b054d9", + "versions.yml:md5,9b17045ca8bdc272cb3f9d349a81d206", + "versions.yml:md5,b21338b74b9a8fe0c7a114f7686c07cd", + "versions.yml:md5,b52279f7d6b891a6523d9321f3f85b47" + ] + } + ], + "timestamp": "2023-10-18T09:50:04.300075498" + } +} \ No newline at end of file diff --git a/workflows/sra/tests/tags.yml b/workflows/sra/tests/tags.yml new file mode 100644 index 00000000..ae41e37d --- /dev/null +++ b/workflows/sra/tests/tags.yml @@ -0,0 +1,16 @@ +sra_custom_ena_metadata_fields: + - workflows/sra/** +sra_default_parameters: + - workflows/sra/** +sra_force_sratools_download: + - workflows/sra/** +sra_nf_core_pipeline_atacseq: + - workflows/sra/** +sra_nf_core_pipeline_rnaseq: + - workflows/sra/** +sra_nf_core_pipeline_taxprofiler: + - workflows/sra/** +sra_nf_core_pipeline_viralrecon: + - workflows/sra/** +sra_skip_fastq_download: + - workflows/sra/** diff --git a/workflows/synapse/main.nf b/workflows/synapse/main.nf index abe7cb22..6b5ea02c 100644 --- a/workflows/synapse/main.nf +++ b/workflows/synapse/main.nf @@ -1,20 +1,3 @@ -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - VALIDATE INPUTS -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -include { paramsSummaryMap } from 'plugin/nf-validation' - -def summary_params = paramsSummaryMap(workflow) - -// Create channel for synapse config -if (params.synapse_config) { - ch_synapse_config = file(params.synapse_config, checkIfExists: true) -} else { - exit 1, 'Please provide a Synapse config file for download authentication!' 
-} - /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ IMPORT LOCAL MODULES/SUBWORKFLOWS @@ -27,14 +10,6 @@ include { SYNAPSE_GET } from '../../modules/local/synapse_get' include { SYNAPSE_TO_SAMPLESHEET } from '../../modules/local/synapse_to_samplesheet' include { SYNAPSE_MERGE_SAMPLESHEET } from '../../modules/local/synapse_merge_samplesheet' -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - IMPORT NF-CORE MODULES/SUBWORKFLOWS -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../../modules/nf-core/custom/dumpsoftwareversions/main' - /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ RUN MAIN WORKFLOW @@ -44,7 +19,8 @@ include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../../modules/nf-core/custom/dumps workflow SYNAPSE { take: - ids // channel: [ ids ] + ids // channel: [ ids ] + ch_synapse_config // channel: [ synapse_config ] main: ch_versions = Channel.empty() @@ -80,7 +56,7 @@ workflow SYNAPSE { SYNAPSE_SHOW .out .metadata - .map { it -> WorkflowSynapse.synapseShowToMap(it) } + .map { it -> WorkflowMain.synapseShowToMap(it) } .set { ch_samples_meta } // @@ -96,14 +72,14 @@ workflow SYNAPSE { SYNAPSE_GET .out .fastq - .map { meta, fastq -> [ WorkflowSynapse.sampleNameFromFastQ( fastq , "*{1,2}*"), fastq ] } + .map { meta, fastq -> [ WorkflowMain.synapseSampleNameFromFastQ( fastq , "*{1,2}*"), fastq ] } .groupTuple(sort: { it -> it.baseName }) .set { ch_fastq } SYNAPSE_GET .out .fastq - .map { meta, fastq -> [ WorkflowSynapse.sampleNameFromFastQ( fastq , "*{1,2}*"), meta.id ] } + .map { meta, fastq -> [ WorkflowMain.synapseSampleNameFromFastQ( fastq , "*{1,2}*"), meta.id ] } .groupTuple() .join(ch_fastq) .map { id, synids, fastq -> @@ -129,26 +105,10 @@ workflow SYNAPSE { ) ch_versions = ch_versions.mix(SYNAPSE_MERGE_SAMPLESHEET.out.versions) - // 
- // MODULE: Dump software versions for all tools used in the workflow - // - CUSTOM_DUMPSOFTWAREVERSIONS ( - ch_versions.unique().collectFile(name: 'collated_versions.yml') - ) -} - -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - COMPLETION EMAIL AND SUMMARY -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -workflow.onComplete { - if (params.email || params.email_on_fail) { - NfcoreTemplate.email(workflow, params, summary_params, projectDir, log) - } - NfcoreTemplate.summary(workflow, params, log) - WorkflowSynapse.curateSamplesheetWarn(log) + emit: + fastq = ch_fastq + samplesheet = SYNAPSE_MERGE_SAMPLESHEET.out.samplesheet + versions = ch_versions.unique() } /* diff --git a/workflows/synapse/nextflow.config b/workflows/synapse/nextflow.config new file mode 100644 index 00000000..01b71558 --- /dev/null +++ b/workflows/synapse/nextflow.config @@ -0,0 +1,5 @@ +includeConfig "../../modules/local/synapse_get/nextflow.config" +includeConfig "../../modules/local/synapse_to_samplesheet/nextflow.config" +includeConfig "../../modules/local/synapse_list/nextflow.config" +includeConfig "../../modules/local/synapse_merge_samplesheet/nextflow.config" +includeConfig "../../modules/local/synapse_show/nextflow.config" \ No newline at end of file