diff --git a/.nf-core.yml b/.nf-core.yml
index 2e90e7d0..3f08fd2d 100644
--- a/.nf-core.yml
+++ b/.nf-core.yml
@@ -5,3 +5,8 @@ lint:
     - config_defaults:
         - params.bamtools_filter_se_config
         - params.bamtools_filter_pe_config
+  files_unchanged:
+    - .github/CONTRIBUTING.md
+    - .github/workflows/branch.yml
+    - .github/workflows/linting_comment.yml
+    - .github/workflows/linting.yml
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b936b306..1d0102de 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -23,6 +23,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Updated pipeline template to [nf-core/tools 2.10](https://github.com/nf-core/tools/releases/tag/2.10).
 - [[#367](https://github.com/nf-core/chipseq/issues/367)] - Get rid of `CheckIfExists` for params paths.
 - [[#370](https://github.com/nf-core/chipseq/issues/370)] - Fix stack overflow exceptions in phantompeakqualtools ([see here](https://github.com/kundajelab/phantompeakqualtools/issues/3)).
+- [[PR #391](https://github.com/nf-core/chipseq/pull/391)] - Get rid of the `lib` folder and rearrange the pipeline accordingly.
 
 ### Software dependencies
 
diff --git a/conf/base.config b/conf/base.config
index b5fcc4af..42ba5a89 100644
--- a/conf/base.config
+++ b/conf/base.config
@@ -57,7 +57,4 @@ process {
         errorStrategy = 'retry'
         maxRetries    = 2
     }
-    withName:CUSTOM_DUMPSOFTWAREVERSIONS {
-        cache = false
-    }
 }
diff --git a/conf/modules.config b/conf/modules.config
index 7f923394..59af9e66 100644
--- a/conf/modules.config
+++ b/conf/modules.config
@@ -30,14 +30,6 @@ process {
         ]
     }
 
-    withName: CUSTOM_DUMPSOFTWAREVERSIONS {
-        publishDir = [
-            path: { "${params.outdir}/pipeline_info" },
-            mode: params.publish_dir_mode,
-            pattern: '*_versions.yml'
-        ]
-    }
-
     withName: 'KHMER_UNIQUEKMERS' {
         publishDir = [ enabled: false ]
     }
diff --git a/lib/NfcoreTemplate.groovy b/lib/NfcoreTemplate.groovy
deleted file mode 100755
index e248e4c3..00000000
--- a/lib/NfcoreTemplate.groovy
+++ /dev/null
@@ -1,356 +0,0 @@
-//
-// This file holds several functions used within the nf-core pipeline template.
-//
-
-import org.yaml.snakeyaml.Yaml
-import groovy.json.JsonOutput
-import nextflow.extension.FilesEx
-
-class NfcoreTemplate {
-
-    //
-    // Check AWS Batch related parameters have been specified correctly
-    //
-    public static void awsBatch(workflow, params) {
-        if (workflow.profile.contains('awsbatch')) {
-            // Check params.awsqueue and params.awsregion have been set if running on AWSBatch
-            assert (params.awsqueue && params.awsregion) : "Specify correct --awsqueue and --awsregion parameters on AWSBatch!"
-            // Check outdir paths to be S3 buckets if running on AWSBatch
-            assert params.outdir.startsWith('s3:')       : "Outdir not on S3 - specify S3 Bucket to run on AWSBatch!"
-        }
-    }
-
-    //
-    //  Warn if a -profile or Nextflow config has not been provided to run the pipeline
-    //
-    public static void checkConfigProvided(workflow, log) {
-        if (workflow.profile == 'standard' && workflow.configFiles.size() <= 1) {
-            log.warn "[$workflow.manifest.name] You are attempting to run the pipeline without any custom configuration!\n\n" +
-                    "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" +
-                    "   (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" +
-                    "   (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" +
-                    "   (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" +
-                    "Please refer to the quick start section and usage docs for the pipeline.\n "
-        }
-    }
-
-    //
-    // Generate version string
-    //
-    public static String version(workflow) {
-        String version_string = ""
-
-        if (workflow.manifest.version) {
-            def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : ''
-            version_string += "${prefix_v}${workflow.manifest.version}"
-        }
-
-        if (workflow.commitId) {
-            def git_shortsha = workflow.commitId.substring(0, 7)
-            version_string += "-g${git_shortsha}"
-        }
-
-        return version_string
-    }
-
-    //
-    // Construct and send completion email
-    //
-    public static void email(workflow, params, summary_params, projectDir, log, multiqc_report=[]) {
-
-        // Set up the e-mail variables
-        def subject = "[$workflow.manifest.name] Successful: $workflow.runName"
-        if (!workflow.success) {
-            subject = "[$workflow.manifest.name] FAILED: $workflow.runName"
-        }
-
-        def summary = [:]
-        for (group in summary_params.keySet()) {
-            summary << summary_params[group]
-        }
-
-        def misc_fields = [:]
-        misc_fields['Date Started']              = workflow.start
-        misc_fields['Date Completed']            = workflow.complete
-        misc_fields['Pipeline script file path'] = workflow.scriptFile
-        misc_fields['Pipeline script hash ID']   = workflow.scriptId
-        if (workflow.repository) misc_fields['Pipeline repository Git URL']    = workflow.repository
-        if (workflow.commitId)   misc_fields['Pipeline repository Git Commit'] = workflow.commitId
-        if (workflow.revision)   misc_fields['Pipeline Git branch/tag']        = workflow.revision
-        misc_fields['Nextflow Version']           = workflow.nextflow.version
-        misc_fields['Nextflow Build']             = workflow.nextflow.build
-        misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp
-
-        def email_fields = [:]
-        email_fields['version']      = NfcoreTemplate.version(workflow)
-        email_fields['runName']      = workflow.runName
-        email_fields['success']      = workflow.success
-        email_fields['dateComplete'] = workflow.complete
-        email_fields['duration']     = workflow.duration
-        email_fields['exitStatus']   = workflow.exitStatus
-        email_fields['errorMessage'] = (workflow.errorMessage ?: 'None')
-        email_fields['errorReport']  = (workflow.errorReport ?: 'None')
-        email_fields['commandLine']  = workflow.commandLine
-        email_fields['projectDir']   = workflow.projectDir
-        email_fields['summary']      = summary << misc_fields
-
-        // On success try attach the multiqc report
-        def mqc_report = null
-        try {
-            if (workflow.success) {
-                mqc_report = multiqc_report.getVal()
-                if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) {
-                    if (mqc_report.size() > 1) {
-                        log.warn "[$workflow.manifest.name] Found multiple reports from process 'MULTIQC', will use only one"
-                    }
-                    mqc_report = mqc_report[0]
-                }
-            }
-        } catch (all) {
-            if (multiqc_report) {
-                log.warn "[$workflow.manifest.name] Could not attach MultiQC report to summary email"
-            }
-        }
-
-        // Check if we are only sending emails on failure
-        def email_address = params.email
-        if (!params.email && params.email_on_fail && !workflow.success) {
-            email_address = params.email_on_fail
-        }
-
-        // Render the TXT template
-        def engine       = new groovy.text.GStringTemplateEngine()
-        def tf           = new File("$projectDir/assets/email_template.txt")
-        def txt_template = engine.createTemplate(tf).make(email_fields)
-        def email_txt    = txt_template.toString()
-
-        // Render the HTML template
-        def hf            = new File("$projectDir/assets/email_template.html")
-        def html_template = engine.createTemplate(hf).make(email_fields)
-        def email_html    = html_template.toString()
-
-        // Render the sendmail template
-        def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit
-        def smail_fields           = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "$projectDir", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes() ]
-        def sf                     = new File("$projectDir/assets/sendmail_template.txt")
-        def sendmail_template      = engine.createTemplate(sf).make(smail_fields)
-        def sendmail_html          = sendmail_template.toString()
-
-        // Send the HTML e-mail
-        Map colors = logColours(params.monochrome_logs)
-        if (email_address) {
-            try {
-                if (params.plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') }
-                // Try to send HTML e-mail using sendmail
-                def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html")
-                sendmail_tf.withWriter { w -> w << sendmail_html }
-                [ 'sendmail', '-t' ].execute() << sendmail_html
-                log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (sendmail)-"
-            } catch (all) {
-                // Catch failures and try with plaintext
-                def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ]
-                if ( mqc_report != null && mqc_report.size() <= max_multiqc_email_size.toBytes() ) {
-                    mail_cmd += [ '-A', mqc_report ]
-                }
-                mail_cmd.execute() << email_html
-                log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (mail)-"
-            }
-        }
-
-        // Write summary e-mail HTML to a file
-        def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html")
-        output_hf.withWriter { w -> w << email_html }
-        FilesEx.copyTo(output_hf.toPath(), "${params.outdir}/pipeline_info/pipeline_report.html");
-        output_hf.delete()
-
-        // Write summary e-mail TXT to a file
-        def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt")
-        output_tf.withWriter { w -> w << email_txt }
-        FilesEx.copyTo(output_tf.toPath(), "${params.outdir}/pipeline_info/pipeline_report.txt");
-        output_tf.delete()
-    }
-
-    //
-    // Construct and send a notification to a web server as JSON
-    // e.g. Microsoft Teams and Slack
-    //
-    public static void IM_notification(workflow, params, summary_params, projectDir, log) {
-        def hook_url = params.hook_url
-
-        def summary = [:]
-        for (group in summary_params.keySet()) {
-            summary << summary_params[group]
-        }
-
-        def misc_fields = [:]
-        misc_fields['start']                                = workflow.start
-        misc_fields['complete']                             = workflow.complete
-        misc_fields['scriptfile']                           = workflow.scriptFile
-        misc_fields['scriptid']                             = workflow.scriptId
-        if (workflow.repository) misc_fields['repository']  = workflow.repository
-        if (workflow.commitId)   misc_fields['commitid']    = workflow.commitId
-        if (workflow.revision)   misc_fields['revision']    = workflow.revision
-        misc_fields['nxf_version']                          = workflow.nextflow.version
-        misc_fields['nxf_build']                            = workflow.nextflow.build
-        misc_fields['nxf_timestamp']                        = workflow.nextflow.timestamp
-
-        def msg_fields = [:]
-        msg_fields['version']      = NfcoreTemplate.version(workflow)
-        msg_fields['runName']      = workflow.runName
-        msg_fields['success']      = workflow.success
-        msg_fields['dateComplete'] = workflow.complete
-        msg_fields['duration']     = workflow.duration
-        msg_fields['exitStatus']   = workflow.exitStatus
-        msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None')
-        msg_fields['errorReport']  = (workflow.errorReport ?: 'None')
-        msg_fields['commandLine']  = workflow.commandLine.replaceFirst(/ +--hook_url +[^ ]+/, "")
-        msg_fields['projectDir']   = workflow.projectDir
-        msg_fields['summary']      = summary << misc_fields
-
-        // Render the JSON template
-        def engine       = new groovy.text.GStringTemplateEngine()
-        // Different JSON depending on the service provider
-        // Defaults to "Adaptive Cards" (https://adaptivecards.io), except Slack which has its own format
-        def json_path     = hook_url.contains("hooks.slack.com") ? "slackreport.json" : "adaptivecard.json"
-        def hf            = new File("$projectDir/assets/${json_path}")
-        def json_template = engine.createTemplate(hf).make(msg_fields)
-        def json_message  = json_template.toString()
-
-        // POST
-        def post = new URL(hook_url).openConnection();
-        post.setRequestMethod("POST")
-        post.setDoOutput(true)
-        post.setRequestProperty("Content-Type", "application/json")
-        post.getOutputStream().write(json_message.getBytes("UTF-8"));
-        def postRC = post.getResponseCode();
-        if (! postRC.equals(200)) {
-            log.warn(post.getErrorStream().getText());
-        }
-    }
-
-    //
-    // Dump pipeline parameters in a json file
-    //
-    public static void dump_parameters(workflow, params) {
-        def timestamp  = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')
-        def filename   = "params_${timestamp}.json"
-        def temp_pf    = new File(workflow.launchDir.toString(), ".${filename}")
-        def jsonStr    = JsonOutput.toJson(params)
-        temp_pf.text   = JsonOutput.prettyPrint(jsonStr)
-
-        FilesEx.copyTo(temp_pf.toPath(), "${params.outdir}/pipeline_info/params_${timestamp}.json")
-        temp_pf.delete()
-    }
-
-    //
-    // Print pipeline summary on completion
-    //
-    public static void summary(workflow, params, log) {
-        Map colors = logColours(params.monochrome_logs)
-        if (workflow.success) {
-            if (workflow.stats.ignoredCount == 0) {
-                log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-"
-            } else {
-                log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-"
-            }
-        } else {
-            log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-"
-        }
-    }
-
-    //
-    // ANSII Colours used for terminal logging
-    //
-    public static Map logColours(Boolean monochrome_logs) {
-        Map colorcodes = [:]
-
-        // Reset / Meta
-        colorcodes['reset']      = monochrome_logs ? '' : "\033[0m"
-        colorcodes['bold']       = monochrome_logs ? '' : "\033[1m"
-        colorcodes['dim']        = monochrome_logs ? '' : "\033[2m"
-        colorcodes['underlined'] = monochrome_logs ? '' : "\033[4m"
-        colorcodes['blink']      = monochrome_logs ? '' : "\033[5m"
-        colorcodes['reverse']    = monochrome_logs ? '' : "\033[7m"
-        colorcodes['hidden']     = monochrome_logs ? '' : "\033[8m"
-
-        // Regular Colors
-        colorcodes['black']      = monochrome_logs ? '' : "\033[0;30m"
-        colorcodes['red']        = monochrome_logs ? '' : "\033[0;31m"
-        colorcodes['green']      = monochrome_logs ? '' : "\033[0;32m"
-        colorcodes['yellow']     = monochrome_logs ? '' : "\033[0;33m"
-        colorcodes['blue']       = monochrome_logs ? '' : "\033[0;34m"
-        colorcodes['purple']     = monochrome_logs ? '' : "\033[0;35m"
-        colorcodes['cyan']       = monochrome_logs ? '' : "\033[0;36m"
-        colorcodes['white']      = monochrome_logs ? '' : "\033[0;37m"
-
-        // Bold
-        colorcodes['bblack']     = monochrome_logs ? '' : "\033[1;30m"
-        colorcodes['bred']       = monochrome_logs ? '' : "\033[1;31m"
-        colorcodes['bgreen']     = monochrome_logs ? '' : "\033[1;32m"
-        colorcodes['byellow']    = monochrome_logs ? '' : "\033[1;33m"
-        colorcodes['bblue']      = monochrome_logs ? '' : "\033[1;34m"
-        colorcodes['bpurple']    = monochrome_logs ? '' : "\033[1;35m"
-        colorcodes['bcyan']      = monochrome_logs ? '' : "\033[1;36m"
-        colorcodes['bwhite']     = monochrome_logs ? '' : "\033[1;37m"
-
-        // Underline
-        colorcodes['ublack']     = monochrome_logs ? '' : "\033[4;30m"
-        colorcodes['ured']       = monochrome_logs ? '' : "\033[4;31m"
-        colorcodes['ugreen']     = monochrome_logs ? '' : "\033[4;32m"
-        colorcodes['uyellow']    = monochrome_logs ? '' : "\033[4;33m"
-        colorcodes['ublue']      = monochrome_logs ? '' : "\033[4;34m"
-        colorcodes['upurple']    = monochrome_logs ? '' : "\033[4;35m"
-        colorcodes['ucyan']      = monochrome_logs ? '' : "\033[4;36m"
-        colorcodes['uwhite']     = monochrome_logs ? '' : "\033[4;37m"
-
-        // High Intensity
-        colorcodes['iblack']     = monochrome_logs ? '' : "\033[0;90m"
-        colorcodes['ired']       = monochrome_logs ? '' : "\033[0;91m"
-        colorcodes['igreen']     = monochrome_logs ? '' : "\033[0;92m"
-        colorcodes['iyellow']    = monochrome_logs ? '' : "\033[0;93m"
-        colorcodes['iblue']      = monochrome_logs ? '' : "\033[0;94m"
-        colorcodes['ipurple']    = monochrome_logs ? '' : "\033[0;95m"
-        colorcodes['icyan']      = monochrome_logs ? '' : "\033[0;96m"
-        colorcodes['iwhite']     = monochrome_logs ? '' : "\033[0;97m"
-
-        // Bold High Intensity
-        colorcodes['biblack']    = monochrome_logs ? '' : "\033[1;90m"
-        colorcodes['bired']      = monochrome_logs ? '' : "\033[1;91m"
-        colorcodes['bigreen']    = monochrome_logs ? '' : "\033[1;92m"
-        colorcodes['biyellow']   = monochrome_logs ? '' : "\033[1;93m"
-        colorcodes['biblue']     = monochrome_logs ? '' : "\033[1;94m"
-        colorcodes['bipurple']   = monochrome_logs ? '' : "\033[1;95m"
-        colorcodes['bicyan']     = monochrome_logs ? '' : "\033[1;96m"
-        colorcodes['biwhite']    = monochrome_logs ? '' : "\033[1;97m"
-
-        return colorcodes
-    }
-
-    //
-    // Does what is says on the tin
-    //
-    public static String dashedLine(monochrome_logs) {
-        Map colors = logColours(monochrome_logs)
-        return "-${colors.dim}----------------------------------------------------${colors.reset}-"
-    }
-
-    //
-    // nf-core logo
-    //
-    public static String logo(workflow, monochrome_logs) {
-        Map colors = logColours(monochrome_logs)
-        String workflow_version = NfcoreTemplate.version(workflow)
-        String.format(
-            """\n
-            ${dashedLine(monochrome_logs)}
-                                                    ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset}
-            ${colors.blue}        ___     __   __   __   ___     ${colors.green}/,-._.--~\'${colors.reset}
-            ${colors.blue}  |\\ | |__  __ /  ` /  \\ |__) |__         ${colors.yellow}}  {${colors.reset}
-            ${colors.blue}  | \\| |       \\__, \\__/ |  \\ |___     ${colors.green}\\`-._,-`-,${colors.reset}
-                                                    ${colors.green}`._,._,\'${colors.reset}
-            ${colors.purple}  ${workflow.manifest.name} ${workflow_version}${colors.reset}
-            ${dashedLine(monochrome_logs)}
-            """.stripIndent()
-        )
-    }
-}
diff --git a/lib/Utils.groovy b/lib/Utils.groovy
deleted file mode 100644
index 8d030f4e..00000000
--- a/lib/Utils.groovy
+++ /dev/null
@@ -1,47 +0,0 @@
-//
-// This file holds several Groovy functions that could be useful for any Nextflow pipeline
-//
-
-import org.yaml.snakeyaml.Yaml
-
-class Utils {
-
-    //
-    // When running with -profile conda, warn if channels have not been set-up appropriately
-    //
-    public static void checkCondaChannels(log) {
-        Yaml parser = new Yaml()
-        def channels = []
-        try {
-            def config = parser.load("conda config --show channels".execute().text)
-            channels = config.channels
-        } catch(NullPointerException | IOException e) {
-            log.warn "Could not verify conda channel configuration."
-            return
-        }
-
-        // Check that all channels are present
-        // This channel list is ordered by required channel priority.
-        def required_channels_in_order = ['conda-forge', 'bioconda', 'defaults']
-        def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean
-
-        // Check that they are in the right order
-        def channel_priority_violation = false
-        def n = required_channels_in_order.size()
-        for (int i = 0; i < n - 1; i++) {
-            channel_priority_violation |= !(channels.indexOf(required_channels_in_order[i]) < channels.indexOf(required_channels_in_order[i+1]))
-        }
-
-        if (channels_missing | channel_priority_violation) {
-            log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" +
-                "  There is a problem with your Conda configuration!\n\n" +
-                "  You will need to set-up the conda-forge and bioconda channels correctly.\n" +
-                "  Please refer to https://bioconda.github.io/\n" +
-                "  The observed channel order is \n" +
-                "  ${channels}\n" +
-                "  but the following channel order is required:\n" +
-                "  ${required_channels_in_order}\n" +
-                "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
-        }
-    }
-}
diff --git a/lib/WorkflowChipseq.groovy b/lib/WorkflowChipseq.groovy
deleted file mode 100755
index 1dbd7ffc..00000000
--- a/lib/WorkflowChipseq.groovy
+++ /dev/null
@@ -1,156 +0,0 @@
-//
-// This file holds several functions specific to the workflow/chipseq.nf in the nf-core/chipseq pipeline
-//
-
-import nextflow.Nextflow
-import groovy.text.SimpleTemplateEngine
-
-class WorkflowChipseq {
-
-    //
-    // Check and validate parameters
-    //
-    public static void initialise(params, log) {
-        genomeExistsError(params, log)
-
-        if (!params.gtf && !params.gff) {
-            def error_string = "No GTF or GFF3 annotation specified! The pipeline requires at least one of these files."
-            Nextflow.error(error_string)
-        }
-
-        if (params.gtf && params.gff) {
-            gtfGffWarn(log)
-        }
-
-        if (!params.macs_gsize) {
-            macsGsizeWarn(log)
-        }
-
-        if (!params.read_length && !params.macs_gsize) {
-            def error_string = "Both '--read_length' and '--macs_gsize' not specified! Please specify either to infer MACS2 genome size for peak calling."
-            Nextflow.error(error_string)
-        }
-    }
-
-    //
-    // Get workflow summary for MultiQC
-    //
-    public static String paramsSummaryMultiqc(workflow, summary) {
-        String summary_section = ''
-        for (group in summary.keySet()) {
-            def group_params = summary.get(group)  // This gets the parameters of that particular group
-            if (group_params) {
-                summary_section += "    <p style=\"font-size:110%\"><b>$group</b></p>\n"
-                summary_section += "    <dl class=\"dl-horizontal\">\n"
-                for (param in group_params.keySet()) {
-                    summary_section += "        <dt>$param</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>\n"
-                }
-                summary_section += "    </dl>\n"
-            }
-        }
-
-        String yaml_file_text  = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n"
-        yaml_file_text        += "description: ' - this information is collected when the pipeline is started.'\n"
-        yaml_file_text        += "section_name: '${workflow.manifest.name} Workflow Summary'\n"
-        yaml_file_text        += "section_href: 'https://github.com/${workflow.manifest.name}'\n"
-        yaml_file_text        += "plot_type: 'html'\n"
-        yaml_file_text        += "data: |\n"
-        yaml_file_text        += "${summary_section}"
-        return yaml_file_text
-    }
-
-    //
-    // Generate methods description for MultiQC
-    //
-
-    public static String toolCitationText(params) {
-
-        // TODO nf-core: Optionally add in-text citation tools to this list.
-        // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "Tool (Foo et al. 2023)" : "",
-        // Uncomment function in methodsDescriptionText to render in MultiQC report
-        def citation_text = [
-                "Tools used in the workflow included:",
-                "FastQC (Andrews 2010),",
-                "MultiQC (Ewels et al. 2016)",
-                "."
-            ].join(' ').trim()
-
-        return citation_text
-    }
-
-    public static String toolBibliographyText(params) {
-
-        // TODO Optionally add bibliographic entries to this list.
-        // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "<li>Author (2023) Pub name, Journal, DOI</li>" : "",
-        // Uncomment function in methodsDescriptionText to render in MultiQC report
-        def reference_text = [
-                "<li>Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).</li>",
-                "<li>Ewels, P., Magnusson, M., Lundin, S., & KΓ€ller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics , 32(19), 3047–3048. doi: /10.1093/bioinformatics/btw354</li>"
-            ].join(' ').trim()
-
-        return reference_text
-    }
-
-    public static String methodsDescriptionText(run_workflow, mqc_methods_yaml, params) {
-        // Convert  to a named map so can be used as with familar NXF ${workflow} variable syntax in the MultiQC YML file
-        def meta = [:]
-        meta.workflow = run_workflow.toMap()
-        meta["manifest_map"] = run_workflow.manifest.toMap()
-
-        // Pipeline DOI
-        meta["doi_text"] = meta.manifest_map.doi ? "(doi: <a href=\'https://doi.org/${meta.manifest_map.doi}\'>${meta.manifest_map.doi}</a>)" : ""
-        meta["nodoi_text"] = meta.manifest_map.doi ? "": "<li>If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used. </li>"
-
-        // Tool references
-        meta["tool_citations"] = ""
-        meta["tool_bibliography"] = ""
-
-        // TODO Only uncomment below if logic in toolCitationText/toolBibliographyText has been filled!
-        //meta["tool_citations"] = toolCitationText(params).replaceAll(", \\.", ".").replaceAll("\\. \\.", ".").replaceAll(", \\.", ".")
-        //meta["tool_bibliography"] = toolBibliographyText(params)
-
-
-        def methods_text = mqc_methods_yaml.text
-
-        def engine =  new SimpleTemplateEngine()
-        def description_html = engine.createTemplate(methods_text).make(meta)
-
-        return description_html
-    }
-
-    //
-    // Exit pipeline if incorrect --genome key provided
-    //
-    private static void genomeExistsError(params, log) {
-        if (params.genomes && params.genome && !params.genomes.containsKey(params.genome)) {
-            def error_string = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" +
-                "  Genome '${params.genome}' not found in any config files provided to the pipeline.\n" +
-                "  Currently, the available genome keys are:\n" +
-                "  ${params.genomes.keySet().join(", ")}\n" +
-                "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
-            Nextflow.error(error_string)
-        }
-    }
-
-    //
-    // Print a warning if both GTF and GFF have been provided
-    //
-    private static void gtfGffWarn(log) {
-        log.warn "=============================================================================\n" +
-            "  Both '--gtf' and '--gff' parameters have been provided.\n" +
-            "  Using GTF file as priority.\n" +
-            "==================================================================================="
-    }
-
-    //
-    // Print a warning if macs_gsize parameter has not been provided
-    //
-    private static void macsGsizeWarn(log) {
-        log.warn "=============================================================================\n" +
-            "  --macs_gsize parameter has not been provided.\n" +
-            "  It will be auto-calculated by 'khmer unique-kmers.py' using the '--read_length' parameter.\n" +
-            "  Explicitly provide '--macs_gsize' to change this behaviour.\n" +
-            "==================================================================================="
-    }
-
-}
diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy
deleted file mode 100755
index 56cacad8..00000000
--- a/lib/WorkflowMain.groovy
+++ /dev/null
@@ -1,84 +0,0 @@
-//
-// This file holds several functions specific to the main.nf workflow in the nf-core/chipseq pipeline
-//
-
-import nextflow.Nextflow
-
-class WorkflowMain {
-
-    //
-    // Citation string for pipeline
-    //
-    public static String citation(workflow) {
-        return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" +
-            "* The pipeline\n" +
-            "  https://doi.org/10.5281/zenodo.3240506\n\n" +
-            "* The nf-core framework\n" +
-            "  https://doi.org/10.1038/s41587-020-0439-x\n\n" +
-            "* Software dependencies\n" +
-            "  https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md"
-    }
-
-    //
-    // Validate parameters and print summary to screen
-    //
-    public static void initialise(workflow, params, log, args) {
-
-        // Print workflow version and exit on --version
-        if (params.version) {
-            String workflow_version = NfcoreTemplate.version(workflow)
-            log.info "${workflow.manifest.name} ${workflow_version}"
-            System.exit(0)
-        }
-
-        // Check that a -profile or Nextflow config has been provided to run the pipeline
-        NfcoreTemplate.checkConfigProvided(workflow, log)
-        // Check that the profile doesn't contain spaces and doesn't end with a trailing comma
-        checkProfile(workflow.profile, args, log)
-
-        // Check that conda channels are set-up correctly
-        if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) {
-            Utils.checkCondaChannels(log)
-        }
-
-        // Check AWS batch settings
-        NfcoreTemplate.awsBatch(workflow, params)
-    }
-    //
-    // Get attribute from genome config file e.g. fasta
-    //
-    public static Object getGenomeAttribute(params, attribute) {
-        if (params.genomes && params.genome && params.genomes.containsKey(params.genome)) {
-            if (params.genomes[ params.genome ].containsKey(attribute)) {
-                return params.genomes[ params.genome ][ attribute ]
-            }
-        }
-        return null
-    }
-
-    //
-    // Get macs genome size (macs_gsize)
-    //
-    public static Long getMacsGsize(params) {
-        def val = null
-        if (params.genomes && params.genome && params.genomes.containsKey(params.genome)) {
-            if (params.genomes[ params.genome ].containsKey('macs_gsize')) {
-                if (params.genomes[ params.genome ][ 'macs_gsize' ].containsKey(params.read_length.toString())) {
-                    val = params.genomes[ params.genome ][ 'macs_gsize' ][ params.read_length.toString() ]
-                }
-            }
-        }
-        return val
-    }
-
-    // Exit pipeline if --profile contains spaces
-    //
-    private static void checkProfile(profile, args, log) {
-        if (profile.endsWith(',')) {
-            Nextflow.error "Profile cannot end with a trailing comma. Please remove the comma from the end of the profile string.\nHint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead,e.g., `-profile docker,test`."
-        }
-        if (args[0]) {
-            log.warn "nf-core pipelines do not accept positional arguments. The positional argument `${args[0]}` has been detected.\n      Hint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead,e.g., `-profile docker,test`."
-        }
-    }
-}
diff --git a/main.nf b/main.nf
index e9c7a38e..c0623ffd 100755
--- a/main.nf
+++ b/main.nf
@@ -13,72 +13,128 @@ nextflow.enable.dsl = 2
 
 /*
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-    GENOME PARAMETER VALUES
+    IMPORT FUNCTIONS / MODULES / SUBWORKFLOWS / WORKFLOWS
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */
-
-params.fasta         = WorkflowMain.getGenomeAttribute(params, 'fasta')
-params.bwa_index     = WorkflowMain.getGenomeAttribute(params, 'bwa')
-params.bowtie2_index = WorkflowMain.getGenomeAttribute(params, 'bowtie2')
-params.chromap_index = WorkflowMain.getGenomeAttribute(params, 'chromap')
-params.star_index    = WorkflowMain.getGenomeAttribute(params, 'star')
-params.gtf           = WorkflowMain.getGenomeAttribute(params, 'gtf')
-params.gff           = WorkflowMain.getGenomeAttribute(params, 'gff')
-params.gene_bed      = WorkflowMain.getGenomeAttribute(params, 'gene_bed')
-params.blacklist     = WorkflowMain.getGenomeAttribute(params, 'blacklist')
-params.macs_gsize    = WorkflowMain.getMacsGsize(params)
+include { CHIPSEQ                 } from './workflows/chipseq'
+include { PREPARE_GENOME          } from './subworkflows/local/prepare_genome'
+include { PIPELINE_INITIALISATION } from './subworkflows/local/utils_nfcore_chipseq_pipeline'
+include { PIPELINE_COMPLETION     } from './subworkflows/local/utils_nfcore_chipseq_pipeline'
+include { getGenomeAttribute      } from './subworkflows/local/utils_nfcore_chipseq_pipeline'
+include { getMacsGsize            } from './subworkflows/local/utils_nfcore_chipseq_pipeline'
 
 /*
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-    VALIDATE & PRINT PARAMETER SUMMARY
+    GENOME PARAMETER VALUES
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */
 
-include { validateParameters; paramsHelp } from 'plugin/nf-validation'
-
-// Print help message if needed
-if (params.help) {
-    def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs)
-    def citation = '\n' + WorkflowMain.citation(workflow) + '\n'
-    def String command = "nextflow run ${workflow.manifest.name} --input samplesheet.csv --genome GRCh37 -profile docker"
-    log.info logo + paramsHelp(command) + citation + NfcoreTemplate.dashedLine(params.monochrome_logs)
-    System.exit(0)
-}
-
-// Validate input parameters
-if (params.validate_params) {
-    validateParameters()
-}
-
-WorkflowMain.initialise(workflow, params, log, args)
+params.fasta         = getGenomeAttribute('fasta')
+params.bwa_index     = getGenomeAttribute('bwa')
+params.bowtie2_index = getGenomeAttribute('bowtie2')
+params.chromap_index = getGenomeAttribute('chromap')
+params.star_index    = getGenomeAttribute('star')
+params.gtf           = getGenomeAttribute('gtf')
+params.gff           = getGenomeAttribute('gff')
+params.gene_bed      = getGenomeAttribute('gene_bed')
+params.blacklist     = getGenomeAttribute('blacklist')
+params.macs_gsize    = getMacsGsize(params)
 
 /*
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-    NAMED WORKFLOW FOR PIPELINE
+    NAMED WORKFLOWS FOR PIPELINE
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */
 
-include { CHIPSEQ } from './workflows/chipseq'
-
 //
 // WORKFLOW: Run main nf-core/chipseq analysis pipeline
 //
 workflow NFCORE_CHIPSEQ {
-    CHIPSEQ ()
+
+    main:
+    ch_versions = Channel.empty()
+
+    // SUBWORKFLOW: Prepare genome files
+    PREPARE_GENOME (
+        params.genome,
+        params.genomes,
+        params.aligner,
+        params.fasta,
+        params.gtf,
+        params.gff,
+        params.blacklist,
+        params.gene_bed,
+        params.bwa_index,
+        params.bowtie2_index,
+        params.chromap_index,
+        params.star_index,
+    )
+    ch_versions = ch_versions.mix(PREPARE_GENOME.out.versions)
+
+    //
+    // WORKFLOW: Run nf-core/chipseq workflow
+    //
+    ch_samplesheet = Channel.value(file(params.input, checkIfExists: true))
+
+    CHIPSEQ(
+        params.input,
+        ch_versions,
+        PREPARE_GENOME.out.fasta,
+        PREPARE_GENOME.out.fai,
+        PREPARE_GENOME.out.gtf,
+        PREPARE_GENOME.out.gene_bed,
+        PREPARE_GENOME.out.chrom_sizes,
+        PREPARE_GENOME.out.filtered_bed,
+        PREPARE_GENOME.out.bwa_index,
+        PREPARE_GENOME.out.bowtie2_index,
+        PREPARE_GENOME.out.chromap_index,
+        PREPARE_GENOME.out.star_index
+    )
+
+    emit:
+    multiqc_report = CHIPSEQ.out.multiqc_report // channel: /path/to/multiqc_report.html
+    versions       = ch_versions                // channel: [version1, version2, ...]
 }
 
 /*
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-    RUN ALL WORKFLOWS
+    RUN MAIN WORKFLOWS
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */
 
-//
-// WORKFLOW: Execute a single named workflow for the pipeline
-// See: https://github.com/nf-core/rnaseq/issues/619
-//
 workflow {
+
+    main:
+
+    //
+    // SUBWORKFLOW: Run initialisation tasks
+    //
+    PIPELINE_INITIALISATION (
+        params.version,
+        params.help,
+        params.validate_params,
+        params.monochrome_logs,
+        args,
+        params.outdir
+    )
+
+    //
+    // WORKFLOW: Run main workflow
+    //
     NFCORE_CHIPSEQ ()
+
+    //
+    // SUBWORKFLOW: Run completion tasks
+    //
+    PIPELINE_COMPLETION (
+        params.email,
+        params.email_on_fail,
+        params.plaintext_email,
+        params.outdir,
+        params.monochrome_logs,
+        params.hook_url,
+        NFCORE_CHIPSEQ.out.multiqc_report
+    )
 }
 
 /*
diff --git a/modules.json b/modules.json
index 0ae8b30f..14d352e9 100644
--- a/modules.json
+++ b/modules.json
@@ -35,11 +35,6 @@
                         "git_sha": "911696ea0b62df80e900ef244d7867d177971f73",
                         "installed_by": ["modules"]
                     },
-                    "custom/dumpsoftwareversions": {
-                        "branch": "master",
-                        "git_sha": "8ec825f465b9c17f9d83000022995b4f7de6fe93",
-                        "installed_by": ["modules"]
-                    },
                     "custom/getchromsizes": {
                         "branch": "master",
                         "git_sha": "911696ea0b62df80e900ef244d7867d177971f73",
@@ -102,7 +97,7 @@
                     },
                     "phantompeakqualtools": {
                         "branch": "master",
-                        "git_sha": "911696ea0b62df80e900ef244d7867d177971f73",
+                        "git_sha": "2dfe9afa90fefc70e320140e5f41287f01f324b0",
                         "installed_by": ["modules"]
                     },
                     "picard/collectmultiplemetrics": {
@@ -218,6 +213,21 @@
                         "branch": "master",
                         "git_sha": "cfd937a668919d948f6fcbf4218e79de50c2f36f",
                         "installed_by": ["subworkflows"]
+                    },
+                    "utils_nextflow_pipeline": {
+                        "branch": "master",
+                        "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa",
+                        "installed_by": ["subworkflows"]
+                    },
+                    "utils_nfcore_pipeline": {
+                        "branch": "master",
+                        "git_sha": "92de218a329bfc9a9033116eb5f65fd270e72ba3",
+                        "installed_by": ["subworkflows"]
+                    },
+                    "utils_nfvalidation_plugin": {
+                        "branch": "master",
+                        "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa",
+                        "installed_by": ["subworkflows"]
                     }
                 }
             }
diff --git a/modules/local/multiqc.nf b/modules/local/multiqc.nf
index f5b6092e..31da0b07 100644
--- a/modules/local/multiqc.nf
+++ b/modules/local/multiqc.nf
@@ -7,11 +7,9 @@ process MULTIQC {
         'biocontainers/multiqc:1.13a--pyhdfd78af_1' }"
 
     input:
+    path workflow_summary
     path multiqc_config
     path mqc_custom_config
-    path software_versions
-    path workflow_summary
-    path methods_description
     path logo
 
     path ('fastqc/*')
diff --git a/modules/local/multiqc_custom_phantompeakqualtools.nf b/modules/local/multiqc_custom_phantompeakqualtools.nf
index c9239bde..7fc74a2e 100644
--- a/modules/local/multiqc_custom_phantompeakqualtools.nf
+++ b/modules/local/multiqc_custom_phantompeakqualtools.nf
@@ -1,9 +1,9 @@
 process MULTIQC_CUSTOM_PHANTOMPEAKQUALTOOLS {
     tag "$meta.id"
-    conda "conda-forge::r-base=3.5.1"
+    conda "conda-forge::r-base=4.3.3"
     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/r-base:3.5.1':
-        'biocontainers/r-base:3.5.1' }"
+        'oras://community.wave.seqera.io/library/r-base:4.3.3--452dec8277637366':
+        'community.wave.seqera.io/library/r-base:4.3.3--14bb33ac537aea22' }"
 
     input:
     tuple val(meta), path(spp), path(rdata)
diff --git a/modules/nf-core/custom/dumpsoftwareversions/environment.yml b/modules/nf-core/custom/dumpsoftwareversions/environment.yml
deleted file mode 100644
index 9b3272bc..00000000
--- a/modules/nf-core/custom/dumpsoftwareversions/environment.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-name: custom_dumpsoftwareversions
-channels:
-  - conda-forge
-  - bioconda
-  - defaults
-dependencies:
-  - bioconda::multiqc=1.19
diff --git a/modules/nf-core/custom/dumpsoftwareversions/main.nf b/modules/nf-core/custom/dumpsoftwareversions/main.nf
deleted file mode 100644
index f2187611..00000000
--- a/modules/nf-core/custom/dumpsoftwareversions/main.nf
+++ /dev/null
@@ -1,24 +0,0 @@
-process CUSTOM_DUMPSOFTWAREVERSIONS {
-    label 'process_single'
-
-    // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container
-    conda "${moduleDir}/environment.yml"
-    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/multiqc:1.19--pyhdfd78af_0' :
-        'biocontainers/multiqc:1.19--pyhdfd78af_0' }"
-
-    input:
-    path versions
-
-    output:
-    path "software_versions.yml"    , emit: yml
-    path "software_versions_mqc.yml", emit: mqc_yml
-    path "versions.yml"             , emit: versions
-
-    when:
-    task.ext.when == null || task.ext.when
-
-    script:
-    def args = task.ext.args ?: ''
-    template 'dumpsoftwareversions.py'
-}
diff --git a/modules/nf-core/custom/dumpsoftwareversions/meta.yml b/modules/nf-core/custom/dumpsoftwareversions/meta.yml
deleted file mode 100644
index 5f15a5fd..00000000
--- a/modules/nf-core/custom/dumpsoftwareversions/meta.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/meta-schema.json
-name: custom_dumpsoftwareversions
-description: Custom module used to dump software versions within the nf-core pipeline template
-keywords:
-  - custom
-  - dump
-  - version
-tools:
-  - custom:
-      description: Custom module used to dump software versions within the nf-core pipeline template
-      homepage: https://github.com/nf-core/tools
-      documentation: https://github.com/nf-core/tools
-      licence: ["MIT"]
-input:
-  - versions:
-      type: file
-      description: YML file containing software versions
-      pattern: "*.yml"
-output:
-  - yml:
-      type: file
-      description: Standard YML file containing software versions
-      pattern: "software_versions.yml"
-  - mqc_yml:
-      type: file
-      description: MultiQC custom content YML file containing software versions
-      pattern: "software_versions_mqc.yml"
-  - versions:
-      type: file
-      description: File containing software versions
-      pattern: "versions.yml"
-authors:
-  - "@drpatelh"
-  - "@grst"
-maintainers:
-  - "@drpatelh"
-  - "@grst"
diff --git a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py
deleted file mode 100755
index da033408..00000000
--- a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py
+++ /dev/null
@@ -1,101 +0,0 @@
-#!/usr/bin/env python
-
-
-"""Provide functions to merge multiple versions.yml files."""
-
-
-import yaml
-import platform
-from textwrap import dedent
-
-
-def _make_versions_html(versions):
-    """Generate a tabular HTML output of all versions for MultiQC."""
-    html = [
-        dedent(
-            """\\
-            <style>
-            #nf-core-versions tbody:nth-child(even) {
-                background-color: #f2f2f2;
-            }
-            </style>
-            <table class="table" style="width:100%" id="nf-core-versions">
-                <thead>
-                    <tr>
-                        <th> Process Name </th>
-                        <th> Software </th>
-                        <th> Version  </th>
-                    </tr>
-                </thead>
-            """
-        )
-    ]
-    for process, tmp_versions in sorted(versions.items()):
-        html.append("<tbody>")
-        for i, (tool, version) in enumerate(sorted(tmp_versions.items())):
-            html.append(
-                dedent(
-                    f"""\\
-                    <tr>
-                        <td><samp>{process if (i == 0) else ''}</samp></td>
-                        <td><samp>{tool}</samp></td>
-                        <td><samp>{version}</samp></td>
-                    </tr>
-                    """
-                )
-            )
-        html.append("</tbody>")
-    html.append("</table>")
-    return "\\n".join(html)
-
-
-def main():
-    """Load all version files and generate merged output."""
-    versions_this_module = {}
-    versions_this_module["${task.process}"] = {
-        "python": platform.python_version(),
-        "yaml": yaml.__version__,
-    }
-
-    with open("$versions") as f:
-        versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module
-
-    # aggregate versions by the module name (derived from fully-qualified process name)
-    versions_by_module = {}
-    for process, process_versions in versions_by_process.items():
-        module = process.split(":")[-1]
-        try:
-            if versions_by_module[module] != process_versions:
-                raise AssertionError(
-                    "We assume that software versions are the same between all modules. "
-                    "If you see this error-message it means you discovered an edge-case "
-                    "and should open an issue in nf-core/tools. "
-                )
-        except KeyError:
-            versions_by_module[module] = process_versions
-
-    versions_by_module["Workflow"] = {
-        "Nextflow": "$workflow.nextflow.version",
-        "$workflow.manifest.name": "$workflow.manifest.version",
-    }
-
-    versions_mqc = {
-        "id": "software_versions",
-        "section_name": "${workflow.manifest.name} Software Versions",
-        "section_href": "https://github.com/${workflow.manifest.name}",
-        "plot_type": "html",
-        "description": "are collected at run time from the software output.",
-        "data": _make_versions_html(versions_by_module),
-    }
-
-    with open("software_versions.yml", "w") as f:
-        yaml.dump(versions_by_module, f, default_flow_style=False)
-    with open("software_versions_mqc.yml", "w") as f:
-        yaml.dump(versions_mqc, f, default_flow_style=False)
-
-    with open("versions.yml", "w") as f:
-        yaml.dump(versions_this_module, f, default_flow_style=False)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test b/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test
deleted file mode 100644
index b1e1630b..00000000
--- a/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test
+++ /dev/null
@@ -1,43 +0,0 @@
-nextflow_process {
-
-    name "Test Process CUSTOM_DUMPSOFTWAREVERSIONS"
-    script "../main.nf"
-    process "CUSTOM_DUMPSOFTWAREVERSIONS"
-    tag "modules"
-    tag "modules_nfcore"
-    tag "custom"
-    tag "dumpsoftwareversions"
-    tag "custom/dumpsoftwareversions"
-
-    test("Should run without failures") {
-        when {
-            process {
-                """
-                def tool1_version = '''
-                TOOL1:
-                    tool1: 0.11.9
-                '''.stripIndent()
-
-                def tool2_version = '''
-                TOOL2:
-                    tool2: 1.9
-                '''.stripIndent()
-
-                input[0] = Channel.of(tool1_version, tool2_version).collectFile()
-                """
-            }
-        }
-
-        then {
-            assertAll(
-                { assert process.success },
-                { assert snapshot(
-                    process.out.versions,
-                    file(process.out.mqc_yml[0]).readLines()[0..10],
-                    file(process.out.yml[0]).readLines()[0..7]
-                    ).match()
-                }
-            )
-        }
-    }
-}
diff --git a/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap b/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap
deleted file mode 100644
index 5f59a936..00000000
--- a/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap
+++ /dev/null
@@ -1,33 +0,0 @@
-{
-    "Should run without failures": {
-        "content": [
-            [
-                "versions.yml:md5,76d454d92244589d32455833f7c1ba6d"
-            ],
-            [
-                "data: \"<style>\\n#nf-core-versions tbody:nth-child(even) {\\n    background-color: #f2f2f2;\\n\\",
-                "  }\\n</style>\\n<table class=\\\"table\\\" style=\\\"width:100%\\\" id=\\\"nf-core-versions\\\"\\",
-                "  >\\n    <thead>\\n        <tr>\\n            <th> Process Name </th>\\n            <th>\\",
-                "  \\ Software </th>\\n            <th> Version  </th>\\n        </tr>\\n    </thead>\\n\\",
-                "  \\n<tbody>\\n<tr>\\n    <td><samp>CUSTOM_DUMPSOFTWAREVERSIONS</samp></td>\\n    <td><samp>python</samp></td>\\n\\",
-                "  \\    <td><samp>3.11.7</samp></td>\\n</tr>\\n\\n<tr>\\n    <td><samp></samp></td>\\n \\",
-                "  \\   <td><samp>yaml</samp></td>\\n    <td><samp>5.4.1</samp></td>\\n</tr>\\n\\n</tbody>\\n\\",
-                "  <tbody>\\n<tr>\\n    <td><samp>TOOL1</samp></td>\\n    <td><samp>tool1</samp></td>\\n\\",
-                "  \\    <td><samp>0.11.9</samp></td>\\n</tr>\\n\\n</tbody>\\n<tbody>\\n<tr>\\n    <td><samp>TOOL2</samp></td>\\n\\",
-                "  \\    <td><samp>tool2</samp></td>\\n    <td><samp>1.9</samp></td>\\n</tr>\\n\\n</tbody>\\n\\",
-                "  <tbody>\\n<tr>\\n    <td><samp>Workflow</samp></td>\\n    <td><samp>Nextflow</samp></td>\\n\\"
-            ],
-            [
-                "CUSTOM_DUMPSOFTWAREVERSIONS:",
-                "  python: 3.11.7",
-                "  yaml: 5.4.1",
-                "TOOL1:",
-                "  tool1: 0.11.9",
-                "TOOL2:",
-                "  tool2: '1.9'",
-                "Workflow:"
-            ]
-        ],
-        "timestamp": "2024-01-09T23:01:18.710682"
-    }
-}
\ No newline at end of file
diff --git a/modules/nf-core/custom/dumpsoftwareversions/tests/tags.yml b/modules/nf-core/custom/dumpsoftwareversions/tests/tags.yml
deleted file mode 100644
index 405aa24a..00000000
--- a/modules/nf-core/custom/dumpsoftwareversions/tests/tags.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-custom/dumpsoftwareversions:
-  - modules/nf-core/custom/dumpsoftwareversions/**
diff --git a/modules/nf-core/phantompeakqualtools/environment.yml b/modules/nf-core/phantompeakqualtools/environment.yml
new file mode 100644
index 00000000..095b7b4b
--- /dev/null
+++ b/modules/nf-core/phantompeakqualtools/environment.yml
@@ -0,0 +1,9 @@
+---
+# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json
+name: "phantompeakqualtools"
+channels:
+  - conda-forge
+  - bioconda
+  - defaults
+dependencies:
+  - "bioconda::phantompeakqualtools=1.2.2"
diff --git a/modules/nf-core/phantompeakqualtools/main.nf b/modules/nf-core/phantompeakqualtools/main.nf
index edcdf2ce..f0f7dc63 100644
--- a/modules/nf-core/phantompeakqualtools/main.nf
+++ b/modules/nf-core/phantompeakqualtools/main.nf
@@ -3,10 +3,10 @@ process PHANTOMPEAKQUALTOOLS {
     label 'process_medium'
 
     // WARN: Version information not provided by tool on CLI. Please update version string below when bumping container versions.
-    conda "bioconda::phantompeakqualtools=1.2.2"
+    conda "${moduleDir}/environment.yml"
     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/phantompeakqualtools:1.2.2--0' :
-        'biocontainers/phantompeakqualtools:1.2.2--0' }"
+        'oras://community.wave.seqera.io/library/phantompeakqualtools:1.2.2--50be7727b2a72700' :
+        'community.wave.seqera.io/library/phantompeakqualtools:1.2.2--f8026fe2526a5e18' }"
 
     input:
     tuple val(meta), path(bam)
@@ -34,4 +34,18 @@ process PHANTOMPEAKQUALTOOLS {
         phantompeakqualtools: $VERSION
     END_VERSIONS
     """
+
+    stub:
+    def prefix = task.ext.prefix ?: "${meta.id}"
+    def VERSION = '1.2.2' // WARN: Version information not provided by tool on CLI. Please update this string when bumping container versions.
+    """
+    touch ${prefix}.spp.pdf
+    touch ${prefix}.spp.Rdata
+    touch ${prefix}.spp.out
+
+    cat <<-END_VERSIONS > versions.yml
+    "${task.process}":
+        phantompeakqualtools: $VERSION
+    END_VERSIONS
+    """
 }
diff --git a/modules/nf-core/phantompeakqualtools/meta.yml b/modules/nf-core/phantompeakqualtools/meta.yml
index c8290fbd..276f6fdd 100644
--- a/modules/nf-core/phantompeakqualtools/meta.yml
+++ b/modules/nf-core/phantompeakqualtools/meta.yml
@@ -1,5 +1,4 @@
 name: "phantompeakqualtools"
-
 description: |
   "This package computes informative enrichment and quality measures
   for ChIP-seq/DNase-seq/FAIRE-seq/MNase-seq data. It can also be used
@@ -16,12 +15,10 @@ tools:
         for ChIP-seq/DNase-seq/FAIRE-seq/MNase-seq data. It can also be used
         to obtain robust estimates of the predominant fragment length or
         characteristic tag shift values in these assays."
-
       documentation: "https://github.com/kundajelab/phantompeakqualtools"
       tool_dev_url: "https://github.com/kundajelab/phantompeakqualtools"
       doi: "10.1101/gr.136184.111"
-      licence: "['BSD-3-clause']"
-
+      licence: ["BSD-3-clause"]
 input:
   - meta:
       type: map
@@ -32,7 +29,6 @@ input:
       type: file
       description: BAM/CRAM/SAM file
       pattern: "*.{bam,cram,sam}"
-
 output:
   - meta:
       type: map
@@ -57,8 +53,11 @@ output:
       type: file
       description: Rdata file containing the R session
       pattern: "*.{Rdata}"
-
 authors:
   - "@drpatelh"
-  - "@Emiller88"
+  - "@edmundmiller"
+  - "@JoseEspinosa"
+maintainers:
+  - "@drpatelh"
+  - "@edmundmiller"
   - "@JoseEspinosa"
diff --git a/modules/nf-core/phantompeakqualtools/tests/main.nf.test b/modules/nf-core/phantompeakqualtools/tests/main.nf.test
new file mode 100644
index 00000000..ea096bcf
--- /dev/null
+++ b/modules/nf-core/phantompeakqualtools/tests/main.nf.test
@@ -0,0 +1,90 @@
+// nf-core modules test phantompeakqualtools
+nextflow_process {
+
+    name "Test Process PHANTOMPEAKQUALTOOLS"
+    script "../main.nf"
+    process "PHANTOMPEAKQUALTOOLS"
+
+    tag "modules"
+    tag "modules_nfcore"
+    tag "phantompeakqualtools"
+
+    test("sarscov2 - bam - single_end") {
+
+        when {
+            process {
+                """
+                input[0] = [
+                    [ id:'test', single_end:true ], // meta map
+                    file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.single_end.bam', checkIfExists: true)
+                ]
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert process.success },
+                { assert snapshot(process.out.spp,
+                                  file(process.out.pdf.get(0).get(1)).name,
+                                  file(process.out.rdata.get(0).get(1)).name,
+                                  process.out.versions)
+                                  .match()
+                }
+            )
+        }
+
+    }
+
+    test("sarscov2 - bam - paired_end") {
+
+        when {
+            process {
+                """
+                input[0] = [
+                    [ id:'test', single_end:true ], // meta map
+                    file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.bam', checkIfExists: true)
+                ]
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert process.success },
+                { assert snapshot(process.out.spp,
+                                  file(process.out.pdf.get(0).get(1)).name,
+                                  file(process.out.rdata.get(0).get(1)).name,
+                                  process.out.versions)
+                                  .match()
+                }
+            )
+        }
+
+    }
+
+    test("sarscov2 - bam - stub") {
+
+        options "-stub"
+
+        when {
+            process {
+                """
+                input[0] = [
+                    [ id:'test', single_end:false ], // meta map
+                    file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.bam', checkIfExists: true)
+                    ]
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert process.success },
+                { assert snapshot(process.out).match() }
+            )
+        }
+
+    }
+
+}
\ No newline at end of file
diff --git a/modules/nf-core/phantompeakqualtools/tests/main.nf.test.snap b/modules/nf-core/phantompeakqualtools/tests/main.nf.test.snap
new file mode 100644
index 00000000..1021aeb8
--- /dev/null
+++ b/modules/nf-core/phantompeakqualtools/tests/main.nf.test.snap
@@ -0,0 +1,119 @@
+{
+    "sarscov2 - bam - stub": {
+        "content": [
+            {
+                "0": [
+                    [
+                        {
+                            "id": "test",
+                            "single_end": false
+                        },
+                        "test.spp.out:md5,d41d8cd98f00b204e9800998ecf8427e"
+                    ]
+                ],
+                "1": [
+                    [
+                        {
+                            "id": "test",
+                            "single_end": false
+                        },
+                        "test.spp.pdf:md5,d41d8cd98f00b204e9800998ecf8427e"
+                    ]
+                ],
+                "2": [
+                    [
+                        {
+                            "id": "test",
+                            "single_end": false
+                        },
+                        "test.spp.Rdata:md5,d41d8cd98f00b204e9800998ecf8427e"
+                    ]
+                ],
+                "3": [
+                    "versions.yml:md5,e0a48a40af2cf7d5de72c4c3cb47a4fc"
+                ],
+                "pdf": [
+                    [
+                        {
+                            "id": "test",
+                            "single_end": false
+                        },
+                        "test.spp.pdf:md5,d41d8cd98f00b204e9800998ecf8427e"
+                    ]
+                ],
+                "rdata": [
+                    [
+                        {
+                            "id": "test",
+                            "single_end": false
+                        },
+                        "test.spp.Rdata:md5,d41d8cd98f00b204e9800998ecf8427e"
+                    ]
+                ],
+                "spp": [
+                    [
+                        {
+                            "id": "test",
+                            "single_end": false
+                        },
+                        "test.spp.out:md5,d41d8cd98f00b204e9800998ecf8427e"
+                    ]
+                ],
+                "versions": [
+                    "versions.yml:md5,e0a48a40af2cf7d5de72c4c3cb47a4fc"
+                ]
+            }
+        ],
+        "meta": {
+            "nf-test": "0.8.4",
+            "nextflow": "24.04.2"
+        },
+        "timestamp": "2024-06-27T10:46:22.786363"
+    },
+    "sarscov2 - bam - single_end": {
+        "content": [
+            [
+                [
+                    {
+                        "id": "test",
+                        "single_end": true
+                    },
+                    "test.spp.out:md5,b01d976506b6fe45b66c821b1e8a1d15"
+                ]
+            ],
+            "test.spp.pdf",
+            "test.spp.Rdata",
+            [
+                "versions.yml:md5,e0a48a40af2cf7d5de72c4c3cb47a4fc"
+            ]
+        ],
+        "meta": {
+            "nf-test": "0.8.4",
+            "nextflow": "24.04.2"
+        },
+        "timestamp": "2024-06-27T16:21:12.000709154"
+    },
+    "sarscov2 - bam - paired_end": {
+        "content": [
+            [
+                [
+                    {
+                        "id": "test",
+                        "single_end": true
+                    },
+                    "test.spp.out:md5,eed46e75eab119224f397a7a8b5924e6"
+                ]
+            ],
+            "test.spp.pdf",
+            "test.spp.Rdata",
+            [
+                "versions.yml:md5,e0a48a40af2cf7d5de72c4c3cb47a4fc"
+            ]
+        ],
+        "meta": {
+            "nf-test": "0.8.4",
+            "nextflow": "24.04.2"
+        },
+        "timestamp": "2024-06-27T16:21:22.432426907"
+    }
+}
\ No newline at end of file
diff --git a/modules/nf-core/phantompeakqualtools/tests/tags.yml b/modules/nf-core/phantompeakqualtools/tests/tags.yml
new file mode 100644
index 00000000..9031749e
--- /dev/null
+++ b/modules/nf-core/phantompeakqualtools/tests/tags.yml
@@ -0,0 +1,2 @@
+phantompeakqualtools:
+  - "modules/nf-core/phantompeakqualtools/**"
diff --git a/subworkflows/local/prepare_genome.nf b/subworkflows/local/prepare_genome.nf
index bd7033cb..b7878aaf 100644
--- a/subworkflows/local/prepare_genome.nf
+++ b/subworkflows/local/prepare_genome.nf
@@ -27,7 +27,18 @@ include { STAR_GENOMEGENERATE      } from '../../modules/local/star_genomegenera
 
 workflow PREPARE_GENOME {
     take:
+    genome             //  string: genome name
+    genomes            //     map: genome attributes
     prepare_tool_index // string  : tool to prepare index for
+    fasta              //    path: path to genome fasta file
+    gtf                //    file: /path/to/genome.gtf
+    gff                //    file: /path/to/genome.gff
+    blacklist          //    file: /path/to/blacklist.bed
+    gene_bed           //    file: /path/to/gene.bed
+    bwa_index          //    file: /path/to/bwa/index/
+    bowtie2_index      //    file: /path/to/bowtie2/index/
+    chromap_index      //    file: /path/to/chromap/index/
+    star_index         //    file: /path/to/star/index/
 
     main:
 
@@ -37,29 +48,29 @@ workflow PREPARE_GENOME {
     // Uncompress genome fasta file if required
     //
     ch_fasta = Channel.empty()
-    if (params.fasta.endsWith('.gz')) {
-        ch_fasta    = GUNZIP_FASTA ( [ [:], params.fasta ] ).gunzip.map{ it[1] }
+    if (fasta.endsWith('.gz')) {
+        ch_fasta    = GUNZIP_FASTA ( [ [:], fasta ] ).gunzip.map{ it[1] }
         ch_versions = ch_versions.mix(GUNZIP_FASTA.out.versions)
     } else {
-        ch_fasta = Channel.value(file(params.fasta))
+        ch_fasta = Channel.value(file(fasta))
     }
 
     //
     // Uncompress GTF annotation file or create from GFF3 if required
     //
-    if (params.gtf) {
-        if (params.gtf.endsWith('.gz')) {
-            ch_gtf      = GUNZIP_GTF ( [ [:], params.gtf ] ).gunzip.map{ it[1] }
+    if (gtf) {
+        if (gtf.endsWith('.gz')) {
+            ch_gtf      = GUNZIP_GTF ( [ [:], gtf ] ).gunzip.map{ it[1] }
             ch_versions = ch_versions.mix(GUNZIP_GTF.out.versions)
         } else {
-            ch_gtf = file(params.gtf)
+            ch_gtf = Channel.value(file(gtf))
         }
-    } else if (params.gff) {
-        if (params.gff.endsWith('.gz')) {
-            ch_gff      = GUNZIP_GFF ( [ [:], params.gff ] ).gunzip.map{ it[1] }
+    } else if (gff) {
+        if (gff.endsWith('.gz')) {
+            ch_gff      = GUNZIP_GFF ( [ [:], gff ] ).gunzip.map{ it[1] }
             ch_versions = ch_versions.mix(GUNZIP_GFF.out.versions)
         } else {
-            ch_gff = file(params.gff)
+            ch_gff = Channel.value(file(gff))
         }
         ch_gtf      = GFFREAD ( ch_gff ).gtf
         ch_versions = ch_versions.mix(GFFREAD.out.versions)
@@ -69,12 +80,12 @@ workflow PREPARE_GENOME {
     // Uncompress blacklist file if required
     //
     ch_blacklist = Channel.empty()
-    if (params.blacklist) {
-        if (params.blacklist.endsWith('.gz')) {
-            ch_blacklist = GUNZIP_BLACKLIST ( [ [:], params.blacklist ] ).gunzip.map{ it[1] }
+    if (blacklist) {
+        if (blacklist.endsWith('.gz')) {
+            ch_blacklist = GUNZIP_BLACKLIST ( [ [:], blacklist ] ).gunzip.map{ it[1] }
             ch_versions  = ch_versions.mix(GUNZIP_BLACKLIST.out.versions)
         } else {
-            ch_blacklist = Channel.fromPath(file(params.blacklist))
+            ch_blacklist = Channel.value(file(blacklist))
         }
     }
 
@@ -85,10 +96,10 @@ workflow PREPARE_GENOME {
     // If --gtf is supplied along with --genome
     // Make gene bed from supplied --gtf instead of using iGenomes one automatically
     def make_bed = false
-    if (!params.gene_bed) {
+    if (!gene_bed) {
         make_bed = true
-    } else if (params.genome && params.gtf) {
-        if (params.genomes[ params.genome ].gtf != params.gtf) {
+    } else if (genome && gtf) {
+        if (genomes[ genome ].gtf != gtf) {
             make_bed = true
         }
     }
@@ -97,11 +108,11 @@ workflow PREPARE_GENOME {
         ch_gene_bed = GTF2BED ( ch_gtf ).bed
         ch_versions = ch_versions.mix(GTF2BED.out.versions)
     } else {
-        if (params.gene_bed.endsWith('.gz')) {
-            ch_gene_bed = GUNZIP_GENE_BED ( [ [:], params.gene_bed ] ).gunzip.map{ it[1] }
+        if (gene_bed.endsWith('.gz')) {
+            ch_gene_bed = GUNZIP_GENE_BED ( [ [:], gene_bed ] ).gunzip.map{ it[1] }
             ch_versions = ch_versions.mix(GUNZIP_GENE_BED.out.versions)
         } else {
-            ch_gene_bed = Channel.value(file(params.gene_bed))
+            ch_gene_bed = Channel.value(file(gene_bed))
         }
     }
 
@@ -119,24 +130,23 @@ workflow PREPARE_GENOME {
     ch_genome_filtered_bed = Channel.empty()
 
     GENOME_BLACKLIST_REGIONS (
-        CUSTOM_GETCHROMSIZES.out.sizes.map{ it[1] },
+        ch_chrom_sizes,
         ch_blacklist.ifEmpty([])
     )
     ch_genome_filtered_bed = GENOME_BLACKLIST_REGIONS.out.bed
     ch_versions = ch_versions.mix(GENOME_BLACKLIST_REGIONS.out.versions)
 
-
     //
     // Uncompress BWA index or generate from scratch if required
     //
     ch_bwa_index = Channel.empty()
     if (prepare_tool_index == 'bwa') {
-        if (params.bwa_index) {
-            if (params.bwa_index.endsWith('.tar.gz')) {
-                ch_bwa_index = UNTAR_BWA_INDEX ( [ [:], params.bwa_index ] ).untar
+        if (bwa_index) {
+            if (bwa_index.endsWith('.tar.gz')) {
+                ch_bwa_index = UNTAR_BWA_INDEX ( [ [:], bwa_index ] ).untar
                 ch_versions  = ch_versions.mix(UNTAR_BWA_INDEX.out.versions)
             } else {
-                ch_bwa_index = file(params.bwa_index)
+                ch_bwa_index = file(bwa_index)
             }
         } else {
             ch_bwa_index = BWA_INDEX ( ch_fasta.map { [ [:], it ] } ).index
@@ -149,12 +159,12 @@ workflow PREPARE_GENOME {
     //
     ch_bowtie2_index = Channel.empty()
     if (prepare_tool_index == 'bowtie2') {
-        if (params.bowtie2_index) {
-            if (params.bowtie2_index.endsWith('.tar.gz')) {
-                ch_bowtie2_index = UNTAR_BOWTIE2_INDEX ( [ [:], params.bowtie2_index ] ).untar
+        if (bowtie2_index) {
+            if (bowtie2_index.endsWith('.tar.gz')) {
+                ch_bowtie2_index = UNTAR_BOWTIE2_INDEX ( [ [:], bowtie2_index ] ).untar
                 ch_versions  = ch_versions.mix(UNTAR_BOWTIE2_INDEX.out.versions)
             } else {
-                ch_bowtie2_index = [ [:], file(params.bowtie2_index) ]
+                ch_bowtie2_index = [ [:], file(bowtie2_index) ]
             }
         } else {
             ch_bowtie2_index = BOWTIE2_BUILD ( ch_fasta.map { [ [:], it ] } ).index
@@ -167,12 +177,12 @@ workflow PREPARE_GENOME {
     //
     ch_chromap_index = Channel.empty()
     if (prepare_tool_index == 'chromap') {
-        if (params.chromap_index) {
-            if (params.chromap_index.endsWith('.tar.gz')) {
-                ch_chromap_index = UNTARFILES ( [ [:], params.chromap_index ] ).files
+        if (chromap_index) {
+            if (chromap_index.endsWith('.tar.gz')) {
+                ch_chromap_index = UNTARFILES ( [ [:], chromap_index ] ).files
                 ch_versions  = ch_versions.mix(UNTARFILES.out.versions)
             } else {
-                ch_chromap_index = [ [:], file(params.chromap_index) ]
+                ch_chromap_index = [ [:], file(chromap_index) ]
             }
         } else {
             ch_chromap_index = CHROMAP_INDEX ( ch_fasta.map { [ [:], it ] } ).index
@@ -185,12 +195,12 @@ workflow PREPARE_GENOME {
     //
     ch_star_index = Channel.empty()
     if (prepare_tool_index == 'star') {
-        if (params.star_index) {
-            if (params.star_index.endsWith('.tar.gz')) {
-                ch_star_index = UNTAR_STAR_INDEX ( [ [:], params.star_index ] ).untar.map{ it[1] }
+        if (star_index) {
+            if (star_index.endsWith('.tar.gz')) {
+                ch_star_index = UNTAR_STAR_INDEX ( [ [:], star_index ] ).untar.map{ it[1] }
                 ch_versions   = ch_versions.mix(UNTAR_STAR_INDEX.out.versions)
             } else {
-                ch_star_index = Channel.value(file(params.star_index))
+                ch_star_index = Channel.value(file(star_index))
             }
         } else {
             ch_star_index = STAR_GENOMEGENERATE ( ch_fasta, ch_gtf ).index
@@ -209,6 +219,5 @@ workflow PREPARE_GENOME {
     bowtie2_index = ch_bowtie2_index          //    path: bowtie2/index/
     chromap_index = ch_chromap_index          //    path: genome.index
     star_index    = ch_star_index             //    path: star/index/
-
-    versions    = ch_versions.ifEmpty(null) // channel: [ versions.yml ]
+    versions    = ch_versions.ifEmpty(null)   // channel: [ versions.yml ]
 }
diff --git a/subworkflows/local/utils_nfcore_chipseq_pipeline/main.nf b/subworkflows/local/utils_nfcore_chipseq_pipeline/main.nf
new file mode 100644
index 00000000..a1433174
--- /dev/null
+++ b/subworkflows/local/utils_nfcore_chipseq_pipeline/main.nf
@@ -0,0 +1,268 @@
+//
+// Subworkflow with functionality specific to the nf-core/chipseq pipeline
+//
+
+/*
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+    IMPORT FUNCTIONS / MODULES / SUBWORKFLOWS
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+*/
+
+include { UTILS_NFVALIDATION_PLUGIN } from '../../nf-core/utils_nfvalidation_plugin'
+include { paramsSummaryMap          } from 'plugin/nf-validation'
+include { UTILS_NEXTFLOW_PIPELINE   } from '../../nf-core/utils_nextflow_pipeline'
+include { completionEmail           } from '../../nf-core/utils_nfcore_pipeline'
+include { completionSummary         } from '../../nf-core/utils_nfcore_pipeline'
+include { dashedLine                } from '../../nf-core/utils_nfcore_pipeline'
+include { nfCoreLogo                } from '../../nf-core/utils_nfcore_pipeline'
+include { imNotification            } from '../../nf-core/utils_nfcore_pipeline'
+include { UTILS_NFCORE_PIPELINE     } from '../../nf-core/utils_nfcore_pipeline'
+include { workflowCitation          } from '../../nf-core/utils_nfcore_pipeline'
+
+/*
+========================================================================================
+    SUBWORKFLOW TO INITIALISE PIPELINE
+========================================================================================
+*/
+
+workflow PIPELINE_INITIALISATION {
+
+    take:
+    version           // boolean: Display version and exit
+    help              // boolean: Display help text
+    validate_params   // boolean: Boolean whether to validate parameters against the schema at runtime
+    monochrome_logs   // boolean: Do not use coloured log outputs
+    nextflow_cli_args //   array: List of positional nextflow CLI args
+    outdir            //  string: The output directory where the results will be saved
+
+    main:
+
+    //
+    // Print version and exit if required and dump pipeline parameters to JSON file
+    //
+    UTILS_NEXTFLOW_PIPELINE (
+        version,
+        true,
+        outdir,
+        workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1
+    )
+
+    //
+    // Validate parameters and generate parameter summary to stdout
+    //
+    pre_help_text = nfCoreLogo(monochrome_logs)
+    post_help_text = '\n' + workflowCitation() + '\n' + dashedLine(monochrome_logs)
+    def String workflow_command = "nextflow run ${workflow.manifest.name} -profile <docker/singularity/.../institute> --input samplesheet.csv --genome GRCh37 --outdir <OUTDIR>"
+    UTILS_NFVALIDATION_PLUGIN (
+        help,
+        workflow_command,
+        pre_help_text,
+        post_help_text,
+        validate_params,
+        "nextflow_schema.json"
+    )
+
+    //
+    // Check config provided to the pipeline
+    //
+    UTILS_NFCORE_PIPELINE (
+        nextflow_cli_args
+    )
+
+    //
+    // Custom validation for pipeline parameters
+    //
+    validateInputParameters()
+
+}
+
+/*
+========================================================================================
+    SUBWORKFLOW FOR PIPELINE COMPLETION
+========================================================================================
+*/
+
+workflow PIPELINE_COMPLETION {
+
+    take:
+    email           //  string: email address
+    email_on_fail   //  string: email address sent on pipeline failure
+    plaintext_email // boolean: Send plain-text email instead of HTML
+    outdir          //    path: Path to output directory where results will be published
+    monochrome_logs // boolean: Disable ANSI colour codes in log output
+    hook_url        //  string: hook URL for notifications
+    multiqc_report  //  string: Path to MultiQC report
+
+    main:
+
+    summary_params = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json")
+
+    //
+    // Completion email and summary
+    //
+    workflow.onComplete {
+        if (email || email_on_fail) {
+            completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs, multiqc_report.toList())
+        }
+
+        completionSummary(monochrome_logs)
+
+        if (hook_url) {
+            imNotification(summary_params, hook_url)
+        }
+    }
+
+    workflow.onError {
+        log.error "Pipeline failed. Please refer to troubleshooting docs: https://nf-co.re/docs/usage/troubleshooting"
+    }
+}
+
+/*
+========================================================================================
+    FUNCTIONS
+========================================================================================
+*/
+
+//
+// Check and validate pipeline parameters
+//
+def validateInputParameters() {
+
+    genomeExistsError()
+
+    if (!params.fasta) {
+        error("Genome fasta file not specified with e.g. '--fasta genome.fa' or via a detectable config file.")
+    }
+
+    if (!params.gtf && !params.gff) {
+        error("No GTF or GFF3 annotation specified! The pipeline requires at least one of these files.")
+    }
+
+    if (params.gtf && params.gff) {
+        gtfGffWarn(log)
+    }
+
+    if (!params.macs_gsize) {
+        macsGsizeWarn(log)
+    }
+
+    if (!params.read_length && !params.macs_gsize) {
+        error ("Both '--read_length' and '--macs_gsize' not specified! Please specify either to infer MACS2 genome size for peak calling.")
+    }
+}
+
+//
+// Get attribute from genome config file e.g. fasta
+//
+def getGenomeAttribute(attribute) {
+    if (params.genomes && params.genome && params.genomes.containsKey(params.genome)) {
+        if (params.genomes[ params.genome ].containsKey(attribute)) {
+            return params.genomes[ params.genome ][ attribute ]
+        }
+    }
+    return null
+}
+
+//
+// Exit pipeline if incorrect --genome key provided
+//
+def genomeExistsError() {
+    if (params.genomes && params.genome && !params.genomes.containsKey(params.genome)) {
+        def error_string = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" +
+            "  Genome '${params.genome}' not found in any config files provided to the pipeline.\n" +
+            "  Currently, the available genome keys are:\n" +
+            "  ${params.genomes.keySet().join(", ")}\n" +
+            "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
+        error(error_string)
+    }
+}
+
+//
+// Get macs genome size (macs_gsize)
+//
+def getMacsGsize(params) {
+    def val = null
+    if (params.genomes && params.genome && params.genomes.containsKey(params.genome)) {
+        if (params.genomes[ params.genome ].containsKey('macs_gsize')) {
+            if (params.genomes[ params.genome ][ 'macs_gsize' ].containsKey(params.read_length.toString())) {
+                val = params.genomes[ params.genome ][ 'macs_gsize' ][ params.read_length.toString() ]
+            }
+        }
+    }
+    return val
+}
+
+//
+// Generate methods description for MultiQC
+//
+def toolCitationText() {
+    // TODO nf-core: Optionally add in-text citation tools to this list.
+    // Can use ternary operators to dynamically construct based on conditions, e.g. params["run_xyz"] ? "Tool (Foo et al. 2023)" : "",
+    // Uncomment function in methodsDescriptionText to render in MultiQC report
+    def citation_text = [
+            "Tools used in the workflow included:",
+            "FastQC (Andrews 2010),",
+            "MultiQC (Ewels et al. 2016)",
+            "."
+        ].join(' ').trim()
+
+    return citation_text
+}
+
+def toolBibliographyText() {
+    // TODO nf-core: Optionally add bibliographic entries to this list.
+    // Can use ternary operators to dynamically construct based on conditions, e.g. params["run_xyz"] ? "<li>Author (2023) Pub name, Journal, DOI</li>" : "",
+    // Uncomment function in methodsDescriptionText to render in MultiQC report
+    def reference_text = [
+            "<li>Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).</li>",
+            "<li>Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics , 32(19), 3047–3048. doi: 10.1093/bioinformatics/btw354</li>"
+        ].join(' ').trim()
+
+    return reference_text
+}
+
+def methodsDescriptionText(mqc_methods_yaml) {
+    // Convert to a named map so it can be used with the familiar NXF ${workflow} variable syntax in the MultiQC YML file
+    def meta = [:]
+    meta.workflow = workflow.toMap()
+    meta["manifest_map"] = workflow.manifest.toMap()
+
+    // Pipeline DOI
+    meta["doi_text"] = meta.manifest_map.doi ? "(doi: <a href=\'https://doi.org/${meta.manifest_map.doi}\'>${meta.manifest_map.doi}</a>)" : ""
+    meta["nodoi_text"] = meta.manifest_map.doi ? "": "<li>If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used. </li>"
+
+    // Tool references
+    meta["tool_citations"] = ""
+    meta["tool_bibliography"] = ""
+
+    // TODO nf-core: Only uncomment below if logic in toolCitationText/toolBibliographyText has been filled!
+    // meta["tool_citations"] = toolCitationText().replaceAll(", \\.", ".").replaceAll("\\. \\.", ".").replaceAll(", \\.", ".")
+    // meta["tool_bibliography"] = toolBibliographyText()
+    def methods_text = mqc_methods_yaml.text
+
+    def engine =  new groovy.text.SimpleTemplateEngine()
+    def description_html = engine.createTemplate(methods_text).make(meta)
+
+    return description_html.toString()
+}
+
+//
+// Print a warning if both GTF and GFF have been provided
+//
+def gtfGffWarn(log) {
+    log.warn "=============================================================================\n" +
+        "  Both '--gtf' and '--gff' parameters have been provided.\n" +
+        "  Using GTF file as priority.\n" +
+        "==================================================================================="
+}
+
+//
+// Print a warning if macs_gsize parameter has not been provided
+//
+def macsGsizeWarn(log) {
+    log.warn "=============================================================================\n" +
+        "  --macs_gsize parameter has not been provided.\n" +
+        "  It will be auto-calculated by 'khmer unique-kmers.py' using the '--read_length' parameter.\n" +
+        "  Explicitly provide '--macs_gsize macs2_genome_size' to change this behaviour.\n" +
+        "==================================================================================="
+}
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
new file mode 100644
index 00000000..ac31f28f
--- /dev/null
+++ b/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
@@ -0,0 +1,126 @@
+//
+// Subworkflow with functionality that may be useful for any Nextflow pipeline
+//
+
+import org.yaml.snakeyaml.Yaml
+import groovy.json.JsonOutput
+import nextflow.extension.FilesEx
+
+/*
+========================================================================================
+    SUBWORKFLOW DEFINITION
+========================================================================================
+*/
+
+workflow UTILS_NEXTFLOW_PIPELINE {
+
+    take:
+    print_version        // boolean: print version
+    dump_parameters      // boolean: dump parameters
+    outdir               //    path: base directory used to publish pipeline results
+    check_conda_channels // boolean: check conda channels
+
+    main:
+
+    //
+    // Print workflow version and exit on --version
+    //
+    if (print_version) {
+        log.info "${workflow.manifest.name} ${getWorkflowVersion()}"
+        System.exit(0)
+    }
+
+    //
+    // Dump pipeline parameters to a JSON file
+    //
+    if (dump_parameters && outdir) {
+        dumpParametersToJSON(outdir)
+    }
+
+    //
+    // When running with Conda, warn if channels have not been set-up appropriately
+    //
+    if (check_conda_channels) {
+        checkCondaChannels()
+    }
+
+    emit:
+    dummy_emit = true
+}
+
+/*
+========================================================================================
+    FUNCTIONS
+========================================================================================
+*/
+
+//
+// Generate version string
+//
+def getWorkflowVersion() {
+    String version_string = ""
+    if (workflow.manifest.version) {
+        def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : ''
+        version_string += "${prefix_v}${workflow.manifest.version}"
+    }
+
+    if (workflow.commitId) {
+        def git_shortsha = workflow.commitId.substring(0, 7)
+        version_string += "-g${git_shortsha}"
+    }
+
+    return version_string
+}
+
+//
+// Dump pipeline parameters to a JSON file
+//
+def dumpParametersToJSON(outdir) {
+    def timestamp  = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')
+    def filename   = "params_${timestamp}.json"
+    def temp_pf    = new File(workflow.launchDir.toString(), ".${filename}")
+    def jsonStr    = JsonOutput.toJson(params)
+    temp_pf.text   = JsonOutput.prettyPrint(jsonStr)
+
+    FilesEx.copyTo(temp_pf.toPath(), "${outdir}/pipeline_info/params_${timestamp}.json")
+    temp_pf.delete()
+}
+
+//
+// When running with -profile conda, warn if channels have not been set-up appropriately
+//
+def checkCondaChannels() {
+    Yaml parser = new Yaml()
+    def channels = []
+    try {
+        def config = parser.load("conda config --show channels".execute().text)
+        channels = config.channels
+    } catch(NullPointerException | IOException e) {
+        log.warn "Could not verify conda channel configuration."
+        return
+    }
+
+    // Check that all channels are present
+    // This channel list is ordered by required channel priority.
+    def required_channels_in_order = ['conda-forge', 'bioconda', 'defaults']
+    def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean
+
+    // Check that they are in the right order
+    def channel_priority_violation = false
+    def n = required_channels_in_order.size()
+    for (int i = 0; i < n - 1; i++) {
+        channel_priority_violation |= !(channels.indexOf(required_channels_in_order[i]) < channels.indexOf(required_channels_in_order[i+1]))
+    }
+
+    if (channels_missing | channel_priority_violation) {
+        log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" +
+            "  There is a problem with your Conda configuration!\n\n" +
+            "  You will need to set-up the conda-forge and bioconda channels correctly.\n" +
+            "  Please refer to https://bioconda.github.io/\n" +
+            "  The observed channel order is \n" +
+            "  ${channels}\n" +
+            "  but the following channel order is required:\n" +
+            "  ${required_channels_in_order}\n" +
+            "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
+    }
+}
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml b/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml
new file mode 100644
index 00000000..e5c3a0a8
--- /dev/null
+++ b/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml
@@ -0,0 +1,38 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json
+name: "UTILS_NEXTFLOW_PIPELINE"
+description: Subworkflow with functionality that may be useful for any Nextflow pipeline
+keywords:
+  - utility
+  - pipeline
+  - initialise
+  - version
+components: []
+input:
+  - print_version:
+      type: boolean
+      description: |
+        Print the version of the pipeline and exit
+  - dump_parameters:
+      type: boolean
+      description: |
+        Dump the parameters of the pipeline to a JSON file
+  - output_directory:
+      type: directory
+      description: Path to output dir to write JSON file to.
+      pattern: "results/"
+  - check_conda_channel:
+      type: boolean
+      description: |
+        Check if the conda channel priority is correct.
+output:
+  - dummy_emit:
+      type: boolean
+      description: |
+        Dummy emit to make nf-core subworkflows lint happy
+authors:
+  - "@adamrtalbot"
+  - "@drpatelh"
+maintainers:
+  - "@adamrtalbot"
+  - "@drpatelh"
+  - "@maxulysse"
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test
new file mode 100644
index 00000000..68718e4f
--- /dev/null
+++ b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test
@@ -0,0 +1,54 @@
+
+nextflow_function {
+
+    name "Test Functions"
+    script "subworkflows/nf-core/utils_nextflow_pipeline/main.nf"
+    config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config"
+    tag 'subworkflows'
+    tag 'utils_nextflow_pipeline'
+    tag 'subworkflows/utils_nextflow_pipeline'
+
+    test("Test Function getWorkflowVersion") {
+
+        function "getWorkflowVersion"
+
+        then {
+            assertAll(
+                { assert function.success },
+                { assert snapshot(function.result).match() }
+            )
+        }
+    }
+
+    test("Test Function dumpParametersToJSON") {
+
+        function "dumpParametersToJSON"
+
+        when {
+            function {
+                """
+                // define inputs of the function here. Example:
+                input[0] = "$outputDir"
+                """.stripIndent()
+            }
+        }
+
+        then {
+            assertAll(
+                { assert function.success }
+            )
+        }
+    }
+
+    test("Test Function checkCondaChannels") {
+
+        function "checkCondaChannels"
+
+        then {
+            assertAll(
+                { assert function.success },
+                { assert snapshot(function.result).match() }
+            )
+        }
+    }
+}
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap
new file mode 100644
index 00000000..e3f0baf4
--- /dev/null
+++ b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap
@@ -0,0 +1,20 @@
+{
+    "Test Function getWorkflowVersion": {
+        "content": [
+            "v9.9.9"
+        ],
+        "meta": {
+            "nf-test": "0.8.4",
+            "nextflow": "23.10.1"
+        },
+        "timestamp": "2024-02-28T12:02:05.308243"
+    },
+    "Test Function checkCondaChannels": {
+        "content": null,
+        "meta": {
+            "nf-test": "0.8.4",
+            "nextflow": "23.10.1"
+        },
+        "timestamp": "2024-02-28T12:02:12.425833"
+    }
+}
\ No newline at end of file
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test
new file mode 100644
index 00000000..ca964ce8
--- /dev/null
+++ b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test
@@ -0,0 +1,111 @@
+nextflow_workflow {
+
+    name "Test Workflow UTILS_NEXTFLOW_PIPELINE"
+    script "../main.nf"
+    config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config"
+    workflow "UTILS_NEXTFLOW_PIPELINE"
+    tag 'subworkflows'
+    tag 'utils_nextflow_pipeline'
+    tag 'subworkflows/utils_nextflow_pipeline'
+
+    test("Should run no inputs") {
+
+        when {
+            workflow {
+                """
+                print_version        = false
+                dump_parameters      = false
+                outdir               = null
+                check_conda_channels = false
+
+                input[0] = print_version
+                input[1] = dump_parameters
+                input[2] = outdir
+                input[3] = check_conda_channels
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert workflow.success }
+            )
+        }
+    }
+
+    test("Should print version") {
+
+        when {
+            workflow {
+                """
+                print_version        = true
+                dump_parameters      = false
+                outdir               = null
+                check_conda_channels = false
+
+                input[0] = print_version
+                input[1] = dump_parameters
+                input[2] = outdir
+                input[3] = check_conda_channels
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert workflow.success },
+                { assert workflow.stdout.contains("nextflow_workflow v9.9.9") }
+            )
+        }
+    }
+
+    test("Should dump params") {
+
+        when {
+            workflow {
+                """
+                print_version        = false
+                dump_parameters      = true
+                outdir               = 'results'
+                check_conda_channels = false
+
+                input[0] = false
+                input[1] = true
+                input[2] = outdir
+                input[3] = false
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert workflow.success }
+            )
+        }
+    }
+
+    test("Should not create params JSON if no output directory") {
+
+        when {
+            workflow {
+                """
+                print_version        = false
+                dump_parameters      = true
+                outdir               = null
+                check_conda_channels = false
+
+                input[0] = false
+                input[1] = true
+                input[2] = outdir
+                input[3] = false
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert workflow.success }
+            )
+        }
+    }
+}
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config b/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config
new file mode 100644
index 00000000..d0a926bf
--- /dev/null
+++ b/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config
@@ -0,0 +1,9 @@
+manifest {
+    name            = 'nextflow_workflow'
+    author          = """nf-core"""
+    homePage        = 'https://127.0.0.1'
+    description     = """Dummy pipeline"""
+    nextflowVersion  = '!>=23.04.0'
+    version         = '9.9.9'
+    doi             = 'https://doi.org/10.5281/zenodo.5070524'
+}
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml b/subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml
new file mode 100644
index 00000000..f8476112
--- /dev/null
+++ b/subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml
@@ -0,0 +1,2 @@
+subworkflows/utils_nextflow_pipeline:
+  - subworkflows/nf-core/utils_nextflow_pipeline/**
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/subworkflows/nf-core/utils_nfcore_pipeline/main.nf
new file mode 100644
index 00000000..14558c39
--- /dev/null
+++ b/subworkflows/nf-core/utils_nfcore_pipeline/main.nf
@@ -0,0 +1,446 @@
+//
+// Subworkflow with utility functions specific to the nf-core pipeline template
+//
+
+import org.yaml.snakeyaml.Yaml
+import nextflow.extension.FilesEx
+
+/*
+========================================================================================
+    SUBWORKFLOW DEFINITION
+========================================================================================
+*/
+
+workflow UTILS_NFCORE_PIPELINE {
+
+    take:
+    nextflow_cli_args
+
+    main:
+    valid_config = checkConfigProvided()
+    checkProfileProvided(nextflow_cli_args)
+
+    emit:
+    valid_config
+}
+
+/*
+========================================================================================
+    FUNCTIONS
+========================================================================================
+*/
+
+//
+//  Warn if a -profile or Nextflow config has not been provided to run the pipeline
+//
+def checkConfigProvided() {
+    valid_config = true
+    if (workflow.profile == 'standard' && workflow.configFiles.size() <= 1) {
+        log.warn "[$workflow.manifest.name] You are attempting to run the pipeline without any custom configuration!\n\n" +
+            "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" +
+            "   (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" +
+            "   (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" +
+            "   (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" +
+            "Please refer to the quick start section and usage docs for the pipeline.\n "
+        valid_config = false
+    }
+    return valid_config
+}
+
+//
+// Exit pipeline if --profile contains spaces
+//
+def checkProfileProvided(nextflow_cli_args) {
+    if (workflow.profile.endsWith(',')) {
+        error "The `-profile` option cannot end with a trailing comma, please remove it and re-run the pipeline!\n" +
+            "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n"
+    }
+    if (nextflow_cli_args[0]) {
+        log.warn "nf-core pipelines do not accept positional arguments. The positional argument `${nextflow_cli_args[0]}` has been detected.\n" +
+            "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n"
+    }
+}
+
+//
+// Citation string for pipeline
+//
+def workflowCitation() {
+    def temp_doi_ref = ""
+    String[] manifest_doi = workflow.manifest.doi.tokenize(",")
+    // Using a loop to handle multiple DOIs
+    // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers
+    // Removing ` ` since the manifest.doi is a string and not a proper list
+    for (String doi_ref: manifest_doi) temp_doi_ref += "  https://doi.org/${doi_ref.replace('https://doi.org/', '').replace(' ', '')}\n"
+    return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" +
+        "* The pipeline\n" +
+        temp_doi_ref + "\n" +
+        "* The nf-core framework\n" +
+        "  https://doi.org/10.1038/s41587-020-0439-x\n\n" +
+        "* Software dependencies\n" +
+        "  https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md"
+}
+
+//
+// Generate workflow version string
+//
+def getWorkflowVersion() {
+    String version_string = ""
+    if (workflow.manifest.version) {
+        def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : ''
+        version_string += "${prefix_v}${workflow.manifest.version}"
+    }
+
+    if (workflow.commitId) {
+        def git_shortsha = workflow.commitId.substring(0, 7)
+        version_string += "-g${git_shortsha}"
+    }
+
+    return version_string
+}
+
+//
+// Get software versions for pipeline
+//
+def processVersionsFromYAML(yaml_file) {
+    Yaml yaml = new Yaml()
+    versions = yaml.load(yaml_file).collectEntries { k, v -> [ k.tokenize(':')[-1], v ] }
+    return yaml.dumpAsMap(versions).trim()
+}
+
+//
+// Get workflow version for pipeline
+//
+def workflowVersionToYAML() {
+    return """
+    Workflow:
+        $workflow.manifest.name: ${getWorkflowVersion()}
+        Nextflow: $workflow.nextflow.version
+    """.stripIndent().trim()
+}
+
+//
+// Get channel of software versions used in pipeline in YAML format
+//
+def softwareVersionsToYAML(ch_versions) {
+    return ch_versions
+                .unique()
+                .map { processVersionsFromYAML(it) }
+                .unique()
+                .mix(Channel.of(workflowVersionToYAML()))
+}
+
+//
+// Get workflow summary for MultiQC
+//
+def paramsSummaryMultiqc(summary_params) {
+    def summary_section = ''
+    for (group in summary_params.keySet()) {
+        def group_params = summary_params.get(group)  // This gets the parameters of that particular group
+        if (group_params) {
+            summary_section += "    <p style=\"font-size:110%\"><b>$group</b></p>\n"
+            summary_section += "    <dl class=\"dl-horizontal\">\n"
+            for (param in group_params.keySet()) {
+                summary_section += "        <dt>$param</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</span>'}</samp></dd>\n"
+            }
+            summary_section += "    </dl>\n"
+        }
+    }
+
+    String yaml_file_text  = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n"
+    yaml_file_text        += "description: ' - this information is collected when the pipeline is started.'\n"
+    yaml_file_text        += "section_name: '${workflow.manifest.name} Workflow Summary'\n"
+    yaml_file_text        += "section_href: 'https://github.com/${workflow.manifest.name}'\n"
+    yaml_file_text        += "plot_type: 'html'\n"
+    yaml_file_text        += "data: |\n"
+    yaml_file_text        += "${summary_section}"
+
+    return yaml_file_text
+}
+
+//
+// nf-core logo
+//
+def nfCoreLogo(monochrome_logs=true) {
+    Map colors = logColours(monochrome_logs)
+    String.format(
+        """\n
+        ${dashedLine(monochrome_logs)}
+                                                ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset}
+        ${colors.blue}        ___     __   __   __   ___     ${colors.green}/,-._.--~\'${colors.reset}
+        ${colors.blue}  |\\ | |__  __ /  ` /  \\ |__) |__         ${colors.yellow}}  {${colors.reset}
+        ${colors.blue}  | \\| |       \\__, \\__/ |  \\ |___     ${colors.green}\\`-._,-`-,${colors.reset}
+                                                ${colors.green}`._,._,\'${colors.reset}
+        ${colors.purple}  ${workflow.manifest.name} ${getWorkflowVersion()}${colors.reset}
+        ${dashedLine(monochrome_logs)}
+        """.stripIndent()
+    )
+}
+
+//
+// Return dashed line
+//
+def dashedLine(monochrome_logs=true) {
+    Map colors = logColours(monochrome_logs)
+    return "-${colors.dim}----------------------------------------------------${colors.reset}-"
+}
+
+//
+// ANSII colours used for terminal logging
+//
+def logColours(monochrome_logs=true) {
+    Map colorcodes = [:]
+
+    // Reset / Meta
+    colorcodes['reset']      = monochrome_logs ? '' : "\033[0m"
+    colorcodes['bold']       = monochrome_logs ? '' : "\033[1m"
+    colorcodes['dim']        = monochrome_logs ? '' : "\033[2m"
+    colorcodes['underlined'] = monochrome_logs ? '' : "\033[4m"
+    colorcodes['blink']      = monochrome_logs ? '' : "\033[5m"
+    colorcodes['reverse']    = monochrome_logs ? '' : "\033[7m"
+    colorcodes['hidden']     = monochrome_logs ? '' : "\033[8m"
+
+    // Regular Colors
+    colorcodes['black']      = monochrome_logs ? '' : "\033[0;30m"
+    colorcodes['red']        = monochrome_logs ? '' : "\033[0;31m"
+    colorcodes['green']      = monochrome_logs ? '' : "\033[0;32m"
+    colorcodes['yellow']     = monochrome_logs ? '' : "\033[0;33m"
+    colorcodes['blue']       = monochrome_logs ? '' : "\033[0;34m"
+    colorcodes['purple']     = monochrome_logs ? '' : "\033[0;35m"
+    colorcodes['cyan']       = monochrome_logs ? '' : "\033[0;36m"
+    colorcodes['white']      = monochrome_logs ? '' : "\033[0;37m"
+
+    // Bold
+    colorcodes['bblack']     = monochrome_logs ? '' : "\033[1;30m"
+    colorcodes['bred']       = monochrome_logs ? '' : "\033[1;31m"
+    colorcodes['bgreen']     = monochrome_logs ? '' : "\033[1;32m"
+    colorcodes['byellow']    = monochrome_logs ? '' : "\033[1;33m"
+    colorcodes['bblue']      = monochrome_logs ? '' : "\033[1;34m"
+    colorcodes['bpurple']    = monochrome_logs ? '' : "\033[1;35m"
+    colorcodes['bcyan']      = monochrome_logs ? '' : "\033[1;36m"
+    colorcodes['bwhite']     = monochrome_logs ? '' : "\033[1;37m"
+
+    // Underline
+    colorcodes['ublack']     = monochrome_logs ? '' : "\033[4;30m"
+    colorcodes['ured']       = monochrome_logs ? '' : "\033[4;31m"
+    colorcodes['ugreen']     = monochrome_logs ? '' : "\033[4;32m"
+    colorcodes['uyellow']    = monochrome_logs ? '' : "\033[4;33m"
+    colorcodes['ublue']      = monochrome_logs ? '' : "\033[4;34m"
+    colorcodes['upurple']    = monochrome_logs ? '' : "\033[4;35m"
+    colorcodes['ucyan']      = monochrome_logs ? '' : "\033[4;36m"
+    colorcodes['uwhite']     = monochrome_logs ? '' : "\033[4;37m"
+
+    // High Intensity
+    colorcodes['iblack']     = monochrome_logs ? '' : "\033[0;90m"
+    colorcodes['ired']       = monochrome_logs ? '' : "\033[0;91m"
+    colorcodes['igreen']     = monochrome_logs ? '' : "\033[0;92m"
+    colorcodes['iyellow']    = monochrome_logs ? '' : "\033[0;93m"
+    colorcodes['iblue']      = monochrome_logs ? '' : "\033[0;94m"
+    colorcodes['ipurple']    = monochrome_logs ? '' : "\033[0;95m"
+    colorcodes['icyan']      = monochrome_logs ? '' : "\033[0;96m"
+    colorcodes['iwhite']     = monochrome_logs ? '' : "\033[0;97m"
+
+    // Bold High Intensity
+    colorcodes['biblack']    = monochrome_logs ? '' : "\033[1;90m"
+    colorcodes['bired']      = monochrome_logs ? '' : "\033[1;91m"
+    colorcodes['bigreen']    = monochrome_logs ? '' : "\033[1;92m"
+    colorcodes['biyellow']   = monochrome_logs ? '' : "\033[1;93m"
+    colorcodes['biblue']     = monochrome_logs ? '' : "\033[1;94m"
+    colorcodes['bipurple']   = monochrome_logs ? '' : "\033[1;95m"
+    colorcodes['bicyan']     = monochrome_logs ? '' : "\033[1;96m"
+    colorcodes['biwhite']    = monochrome_logs ? '' : "\033[1;97m"
+
+    return colorcodes
+}
+
+//
+// Attach the multiqc report to email
+//
+def attachMultiqcReport(multiqc_report) {
+    def mqc_report = null
+    try {
+        if (workflow.success) {
+            mqc_report = multiqc_report.getVal()
+            if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) {
+                if (mqc_report.size() > 1) {
+                    log.warn "[$workflow.manifest.name] Found multiple reports from process 'MULTIQC', will use only one"
+                }
+                mqc_report = mqc_report[0]
+            }
+        }
+    } catch (all) {
+        if (multiqc_report) {
+            log.warn "[$workflow.manifest.name] Could not attach MultiQC report to summary email"
+        }
+    }
+    return mqc_report
+}
+
+//
+// Construct and send completion email
+//
+def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs=true, multiqc_report=null) {
+
+    // Set up the e-mail variables
+    def subject = "[$workflow.manifest.name] Successful: $workflow.runName"
+    if (!workflow.success) {
+        subject = "[$workflow.manifest.name] FAILED: $workflow.runName"
+    }
+
+    def summary = [:]
+    for (group in summary_params.keySet()) {
+        summary << summary_params[group]
+    }
+
+    def misc_fields = [:]
+    misc_fields['Date Started']              = workflow.start
+    misc_fields['Date Completed']            = workflow.complete
+    misc_fields['Pipeline script file path'] = workflow.scriptFile
+    misc_fields['Pipeline script hash ID']   = workflow.scriptId
+    if (workflow.repository) misc_fields['Pipeline repository Git URL']    = workflow.repository
+    if (workflow.commitId)   misc_fields['Pipeline repository Git Commit'] = workflow.commitId
+    if (workflow.revision)   misc_fields['Pipeline Git branch/tag']        = workflow.revision
+    misc_fields['Nextflow Version']           = workflow.nextflow.version
+    misc_fields['Nextflow Build']             = workflow.nextflow.build
+    misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp
+
+    def email_fields = [:]
+    email_fields['version']      = getWorkflowVersion()
+    email_fields['runName']      = workflow.runName
+    email_fields['success']      = workflow.success
+    email_fields['dateComplete'] = workflow.complete
+    email_fields['duration']     = workflow.duration
+    email_fields['exitStatus']   = workflow.exitStatus
+    email_fields['errorMessage'] = (workflow.errorMessage ?: 'None')
+    email_fields['errorReport']  = (workflow.errorReport ?: 'None')
+    email_fields['commandLine']  = workflow.commandLine
+    email_fields['projectDir']   = workflow.projectDir
+    email_fields['summary']      = summary << misc_fields
+
+    // On success try attach the multiqc report
+    def mqc_report = attachMultiqcReport(multiqc_report)
+
+    // Check if we are only sending emails on failure
+    def email_address = email
+    if (!email && email_on_fail && !workflow.success) {
+        email_address = email_on_fail
+    }
+
+    // Render the TXT template
+    def engine       = new groovy.text.GStringTemplateEngine()
+    def tf           = new File("${workflow.projectDir}/assets/email_template.txt")
+    def txt_template = engine.createTemplate(tf).make(email_fields)
+    def email_txt    = txt_template.toString()
+
+    // Render the HTML template
+    def hf            = new File("${workflow.projectDir}/assets/email_template.html")
+    def html_template = engine.createTemplate(hf).make(email_fields)
+    def email_html    = html_template.toString()
+
+    // Render the sendmail template
+    def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit
+    def smail_fields           = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes() ]
+    def sf                     = new File("${workflow.projectDir}/assets/sendmail_template.txt")
+    def sendmail_template      = engine.createTemplate(sf).make(smail_fields)
+    def sendmail_html          = sendmail_template.toString()
+
+    // Send the HTML e-mail
+    Map colors = logColours(monochrome_logs)
+    if (email_address) {
+        try {
+            if (plaintext_email) { throw new Exception('Send plaintext e-mail, not HTML') }
+            // Try to send HTML e-mail using sendmail
+            def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html")
+            sendmail_tf.withWriter { w -> w << sendmail_html }
+            [ 'sendmail', '-t' ].execute() << sendmail_html
+            log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (sendmail)-"
+        } catch (all) {
+            // Catch failures and try with plaintext
+            def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ]
+            mail_cmd.execute() << email_html
+            log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (mail)-"
+        }
+    }
+
+    // Write summary e-mail HTML to a file
+    def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html")
+    output_hf.withWriter { w -> w << email_html }
+    FilesEx.copyTo(output_hf.toPath(), "${outdir}/pipeline_info/pipeline_report.html");
+    output_hf.delete()
+
+    // Write summary e-mail TXT to a file
+    def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt")
+    output_tf.withWriter { w -> w << email_txt }
+    FilesEx.copyTo(output_tf.toPath(), "${outdir}/pipeline_info/pipeline_report.txt");
+    output_tf.delete()
+}
+
+//
+// Print pipeline summary on completion
+//
+def completionSummary(monochrome_logs=true) {
+    Map colors = logColours(monochrome_logs)
+    if (workflow.success) {
+        if (workflow.stats.ignoredCount == 0) {
+            log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-"
+        } else {
+            log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-"
+        }
+    } else {
+        log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-"
+    }
+}
+
+//
+// Construct and send a notification to a web server as JSON e.g. Microsoft Teams and Slack
+//
+def imNotification(summary_params, hook_url) {
+    def summary = [:]
+    for (group in summary_params.keySet()) {
+        summary << summary_params[group]
+    }
+
+    def misc_fields = [:]
+    misc_fields['start']                                = workflow.start
+    misc_fields['complete']                             = workflow.complete
+    misc_fields['scriptfile']                           = workflow.scriptFile
+    misc_fields['scriptid']                             = workflow.scriptId
+    if (workflow.repository) misc_fields['repository']  = workflow.repository
+    if (workflow.commitId)   misc_fields['commitid']    = workflow.commitId
+    if (workflow.revision)   misc_fields['revision']    = workflow.revision
+    misc_fields['nxf_version']                          = workflow.nextflow.version
+    misc_fields['nxf_build']                            = workflow.nextflow.build
+    misc_fields['nxf_timestamp']                        = workflow.nextflow.timestamp
+
+    def msg_fields = [:]
+    msg_fields['version']      = getWorkflowVersion()
+    msg_fields['runName']      = workflow.runName
+    msg_fields['success']      = workflow.success
+    msg_fields['dateComplete'] = workflow.complete
+    msg_fields['duration']     = workflow.duration
+    msg_fields['exitStatus']   = workflow.exitStatus
+    msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None')
+    msg_fields['errorReport']  = (workflow.errorReport ?: 'None')
+    msg_fields['commandLine']  = workflow.commandLine.replaceFirst(/ +--hook_url +[^ ]+/, "")
+    msg_fields['projectDir']   = workflow.projectDir
+    msg_fields['summary']      = summary << misc_fields
+
+    // Render the JSON template
+    def engine       = new groovy.text.GStringTemplateEngine()
+    // Different JSON depending on the service provider
+    // Defaults to "Adaptive Cards" (https://adaptivecards.io), except Slack which has its own format
+    def json_path     = hook_url.contains("hooks.slack.com") ? "slackreport.json" : "adaptivecard.json"
+    def hf            = new File("${workflow.projectDir}/assets/${json_path}")
+    def json_template = engine.createTemplate(hf).make(msg_fields)
+    def json_message  = json_template.toString()
+
+    // POST
+    def post = new URL(hook_url).openConnection();
+    post.setRequestMethod("POST")
+    post.setDoOutput(true)
+    post.setRequestProperty("Content-Type", "application/json")
+    post.getOutputStream().write(json_message.getBytes("UTF-8"));
+    def postRC = post.getResponseCode();
+    if (! postRC.equals(200)) {
+        log.warn(post.getErrorStream().getText());
+    }
+}
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml b/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml
new file mode 100644
index 00000000..d08d2434
--- /dev/null
+++ b/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml
@@ -0,0 +1,24 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json
+name: "UTILS_NFCORE_PIPELINE"
+description: Subworkflow with utility functions specific to the nf-core pipeline template
+keywords:
+  - utility
+  - pipeline
+  - initialise
+  - version
+components: []
+input:
+  - nextflow_cli_args:
+      type: list
+      description: |
+        Nextflow CLI positional arguments
+output:
+  - valid_config:
+      type: boolean
+      description: |
+        Whether a valid custom config or profile was provided
+authors:
+  - "@adamrtalbot"
+maintainers:
+  - "@adamrtalbot"
+  - "@maxulysse"
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test
new file mode 100644
index 00000000..1dc317f8
--- /dev/null
+++ b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test
@@ -0,0 +1,134 @@
+
+nextflow_function {
+
+    name "Test Functions"
+    script "../main.nf"
+    config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config"
+    tag "subworkflows"
+    tag "subworkflows_nfcore"
+    tag "utils_nfcore_pipeline"
+    tag "subworkflows/utils_nfcore_pipeline"
+
+    test("Test Function checkConfigProvided") {
+
+        function "checkConfigProvided"
+
+        then {
+            assertAll(
+                { assert function.success },
+                { assert snapshot(function.result).match() }
+            )
+        }
+    }
+
+    test("Test Function checkProfileProvided") {
+
+        function "checkProfileProvided"
+
+        when {
+            function {
+                """
+                input[0] = []
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert function.success },
+                { assert snapshot(function.result).match() }
+            )
+        }
+    }
+
+    test("Test Function workflowCitation") {
+
+        function "workflowCitation"
+
+        then {
+            assertAll(
+                { assert function.success },
+                { assert snapshot(function.result).match() }
+            )
+        }
+    }
+
+    test("Test Function nfCoreLogo") {
+
+        function "nfCoreLogo"
+
+        when {
+            function {
+                """
+                input[0] = false
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert function.success },
+                { assert snapshot(function.result).match() }
+            )
+        }
+    }
+
+    test("Test Function dashedLine") {
+
+        function "dashedLine"
+
+        when {
+            function {
+                """
+                input[0] = false
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert function.success },
+                { assert snapshot(function.result).match() }
+            )
+        }
+    }
+
+    test("Test Function without logColours") {
+
+        function "logColours"
+
+        when {
+            function {
+                """
+                input[0] = true
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert function.success },
+                { assert snapshot(function.result).match() }
+            )
+        }
+    }
+
+    test("Test Function with logColours") {
+        function "logColours"
+
+        when {
+            function {
+                """
+                input[0] = false
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert function.success },
+                { assert snapshot(function.result).match() }
+            )
+        }
+    }
+}
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap
new file mode 100644
index 00000000..1037232c
--- /dev/null
+++ b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap
@@ -0,0 +1,166 @@
+{
+    "Test Function checkProfileProvided": {
+        "content": null,
+        "meta": {
+            "nf-test": "0.8.4",
+            "nextflow": "23.10.1"
+        },
+        "timestamp": "2024-02-28T12:03:03.360873"
+    },
+    "Test Function checkConfigProvided": {
+        "content": [
+            true
+        ],
+        "meta": {
+            "nf-test": "0.8.4",
+            "nextflow": "23.10.1"
+        },
+        "timestamp": "2024-02-28T12:02:59.729647"
+    },
+    "Test Function nfCoreLogo": {
+        "content": [
+            "\n\n-\u001b[2m----------------------------------------------------\u001b[0m-\n                                        \u001b[0;32m,--.\u001b[0;30m/\u001b[0;32m,-.\u001b[0m\n\u001b[0;34m        ___     __   __   __   ___     \u001b[0;32m/,-._.--~'\u001b[0m\n\u001b[0;34m  |\\ | |__  __ /  ` /  \\ |__) |__         \u001b[0;33m}  {\u001b[0m\n\u001b[0;34m  | \\| |       \\__, \\__/ |  \\ |___     \u001b[0;32m\\`-._,-`-,\u001b[0m\n                                        \u001b[0;32m`._,._,'\u001b[0m\n\u001b[0;35m  nextflow_workflow v9.9.9\u001b[0m\n-\u001b[2m----------------------------------------------------\u001b[0m-\n"
+        ],
+        "meta": {
+            "nf-test": "0.8.4",
+            "nextflow": "23.10.1"
+        },
+        "timestamp": "2024-02-28T12:03:10.562934"
+    },
+    "Test Function workflowCitation": {
+        "content": [
+            "If you use nextflow_workflow for your analysis please cite:\n\n* The pipeline\n  https://doi.org/10.5281/zenodo.5070524\n\n* The nf-core framework\n  https://doi.org/10.1038/s41587-020-0439-x\n\n* Software dependencies\n  https://github.com/nextflow_workflow/blob/master/CITATIONS.md"
+        ],
+        "meta": {
+            "nf-test": "0.8.4",
+            "nextflow": "23.10.1"
+        },
+        "timestamp": "2024-02-28T12:03:07.019761"
+    },
+    "Test Function without logColours": {
+        "content": [
+            {
+                "reset": "",
+                "bold": "",
+                "dim": "",
+                "underlined": "",
+                "blink": "",
+                "reverse": "",
+                "hidden": "",
+                "black": "",
+                "red": "",
+                "green": "",
+                "yellow": "",
+                "blue": "",
+                "purple": "",
+                "cyan": "",
+                "white": "",
+                "bblack": "",
+                "bred": "",
+                "bgreen": "",
+                "byellow": "",
+                "bblue": "",
+                "bpurple": "",
+                "bcyan": "",
+                "bwhite": "",
+                "ublack": "",
+                "ured": "",
+                "ugreen": "",
+                "uyellow": "",
+                "ublue": "",
+                "upurple": "",
+                "ucyan": "",
+                "uwhite": "",
+                "iblack": "",
+                "ired": "",
+                "igreen": "",
+                "iyellow": "",
+                "iblue": "",
+                "ipurple": "",
+                "icyan": "",
+                "iwhite": "",
+                "biblack": "",
+                "bired": "",
+                "bigreen": "",
+                "biyellow": "",
+                "biblue": "",
+                "bipurple": "",
+                "bicyan": "",
+                "biwhite": ""
+            }
+        ],
+        "meta": {
+            "nf-test": "0.8.4",
+            "nextflow": "23.10.1"
+        },
+        "timestamp": "2024-02-28T12:03:17.969323"
+    },
+    "Test Function dashedLine": {
+        "content": [
+            "-\u001b[2m----------------------------------------------------\u001b[0m-"
+        ],
+        "meta": {
+            "nf-test": "0.8.4",
+            "nextflow": "23.10.1"
+        },
+        "timestamp": "2024-02-28T12:03:14.366181"
+    },
+    "Test Function with logColours": {
+        "content": [
+            {
+                "reset": "\u001b[0m",
+                "bold": "\u001b[1m",
+                "dim": "\u001b[2m",
+                "underlined": "\u001b[4m",
+                "blink": "\u001b[5m",
+                "reverse": "\u001b[7m",
+                "hidden": "\u001b[8m",
+                "black": "\u001b[0;30m",
+                "red": "\u001b[0;31m",
+                "green": "\u001b[0;32m",
+                "yellow": "\u001b[0;33m",
+                "blue": "\u001b[0;34m",
+                "purple": "\u001b[0;35m",
+                "cyan": "\u001b[0;36m",
+                "white": "\u001b[0;37m",
+                "bblack": "\u001b[1;30m",
+                "bred": "\u001b[1;31m",
+                "bgreen": "\u001b[1;32m",
+                "byellow": "\u001b[1;33m",
+                "bblue": "\u001b[1;34m",
+                "bpurple": "\u001b[1;35m",
+                "bcyan": "\u001b[1;36m",
+                "bwhite": "\u001b[1;37m",
+                "ublack": "\u001b[4;30m",
+                "ured": "\u001b[4;31m",
+                "ugreen": "\u001b[4;32m",
+                "uyellow": "\u001b[4;33m",
+                "ublue": "\u001b[4;34m",
+                "upurple": "\u001b[4;35m",
+                "ucyan": "\u001b[4;36m",
+                "uwhite": "\u001b[4;37m",
+                "iblack": "\u001b[0;90m",
+                "ired": "\u001b[0;91m",
+                "igreen": "\u001b[0;92m",
+                "iyellow": "\u001b[0;93m",
+                "iblue": "\u001b[0;94m",
+                "ipurple": "\u001b[0;95m",
+                "icyan": "\u001b[0;96m",
+                "iwhite": "\u001b[0;97m",
+                "biblack": "\u001b[1;90m",
+                "bired": "\u001b[1;91m",
+                "bigreen": "\u001b[1;92m",
+                "biyellow": "\u001b[1;93m",
+                "biblue": "\u001b[1;94m",
+                "bipurple": "\u001b[1;95m",
+                "bicyan": "\u001b[1;96m",
+                "biwhite": "\u001b[1;97m"
+            }
+        ],
+        "meta": {
+            "nf-test": "0.8.4",
+            "nextflow": "23.10.1"
+        },
+        "timestamp": "2024-02-28T12:03:21.714424"
+    }
+}
\ No newline at end of file
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test
new file mode 100644
index 00000000..8940d32d
--- /dev/null
+++ b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test
@@ -0,0 +1,29 @@
+nextflow_workflow {
+
+    name "Test Workflow UTILS_NFCORE_PIPELINE"
+    script "../main.nf"
+    config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config"
+    workflow "UTILS_NFCORE_PIPELINE"
+    tag "subworkflows"
+    tag "subworkflows_nfcore"
+    tag "utils_nfcore_pipeline"
+    tag "subworkflows/utils_nfcore_pipeline"
+
+    test("Should run without failures") {
+
+        when {
+            workflow {
+                """
+                input[0] = []
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert workflow.success },
+                { assert snapshot(workflow.out).match() }
+            )
+        }
+    }
+}
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap
new file mode 100644
index 00000000..859d1030
--- /dev/null
+++ b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap
@@ -0,0 +1,19 @@
+{
+    "Should run without failures": {
+        "content": [
+            {
+                "0": [
+                    true
+                ],
+                "valid_config": [
+                    true
+                ]
+            }
+        ],
+        "meta": {
+            "nf-test": "0.8.4",
+            "nextflow": "23.10.1"
+        },
+        "timestamp": "2024-02-28T12:03:25.726491"
+    }
+}
\ No newline at end of file
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config b/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config
new file mode 100644
index 00000000..d0a926bf
--- /dev/null
+++ b/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config
@@ -0,0 +1,9 @@
+manifest {
+    name            = 'nextflow_workflow'
+    author          = """nf-core"""
+    homePage        = 'https://127.0.0.1'
+    description     = """Dummy pipeline"""
+    nextflowVersion = '!>=23.04.0'
+    version         = '9.9.9'
+    doi             = 'https://doi.org/10.5281/zenodo.5070524'
+}
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml b/subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml
new file mode 100644
index 00000000..ac8523c9
--- /dev/null
+++ b/subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml
@@ -0,0 +1,2 @@
+subworkflows/utils_nfcore_pipeline:
+  - subworkflows/nf-core/utils_nfcore_pipeline/**
diff --git a/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf b/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf
new file mode 100644
index 00000000..2585b65d
--- /dev/null
+++ b/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf
@@ -0,0 +1,62 @@
+//
+// Subworkflow that uses the nf-validation plugin to render help text and parameter summary
+//
+
+/*
+========================================================================================
+    IMPORT NF-VALIDATION PLUGIN
+========================================================================================
+*/
+
+include { paramsHelp         } from 'plugin/nf-validation'
+include { paramsSummaryLog   } from 'plugin/nf-validation'
+include { validateParameters } from 'plugin/nf-validation'
+
+/*
+========================================================================================
+    SUBWORKFLOW DEFINITION
+========================================================================================
+*/
+
+workflow UTILS_NFVALIDATION_PLUGIN {
+
+    take:
+    print_help       // boolean: print help
+    workflow_command //  string: default command used to run pipeline
+    pre_help_text    //  string: string to be printed before help text and summary log
+    post_help_text   //  string: string to be printed after help text and summary log
+    validate_params  // boolean: validate parameters
+    schema_filename  //    path: JSON schema file, null to use default value
+
+    main:
+
+    log.debug "Using schema file: ${schema_filename}"
+
+    // Default values for strings
+    pre_help_text    = pre_help_text    ?: ''
+    post_help_text   = post_help_text   ?: ''
+    workflow_command = workflow_command ?: ''
+
+    //
+    // Print help message if needed
+    //
+    if (print_help) {
+        log.info pre_help_text + paramsHelp(workflow_command, parameters_schema: schema_filename) + post_help_text
+        System.exit(0)
+    }
+
+    //
+    // Print parameter summary to stdout
+    //
+    log.info pre_help_text + paramsSummaryLog(workflow, parameters_schema: schema_filename) + post_help_text
+
+    //
+    // Validate parameters relative to the parameter JSON schema
+    //
+    if (validate_params){
+        validateParameters(parameters_schema: schema_filename)
+    }
+
+    emit:
+    dummy_emit = true
+}
diff --git a/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml b/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml
new file mode 100644
index 00000000..3d4a6b04
--- /dev/null
+++ b/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml
@@ -0,0 +1,44 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json
+name: "UTILS_NFVALIDATION_PLUGIN"
+description: Use nf-validation to initiate and validate a pipeline
+keywords:
+  - utility
+  - pipeline
+  - initialise
+  - validation
+components: []
+input:
+  - print_help:
+      type: boolean
+      description: |
+        Print help message and exit
+  - workflow_command:
+      type: string
+      description: |
+        The command to run the workflow e.g. "nextflow run main.nf"
+  - pre_help_text:
+      type: string
+      description: |
+        Text to print before the help message
+  - post_help_text:
+      type: string
+      description: |
+        Text to print after the help message
+  - validate_params:
+      type: boolean
+      description: |
+        Validate the parameters and error if invalid.
+  - schema_filename:
+      type: string
+      description: |
+        The filename of the schema to validate against.
+output:
+  - dummy_emit:
+      type: boolean
+      description: |
+        Dummy emit to make nf-core subworkflows lint happy
+authors:
+  - "@adamrtalbot"
+maintainers:
+  - "@adamrtalbot"
+  - "@maxulysse"
diff --git a/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test b/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test
new file mode 100644
index 00000000..5784a33f
--- /dev/null
+++ b/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test
@@ -0,0 +1,200 @@
+nextflow_workflow {
+
+    name "Test Workflow UTILS_NFVALIDATION_PLUGIN"
+    script "../main.nf"
+    workflow "UTILS_NFVALIDATION_PLUGIN"
+    tag "subworkflows"
+    tag "subworkflows_nfcore"
+    tag "plugin/nf-validation"
+    tag "'plugin/nf-validation'"
+    tag "utils_nfvalidation_plugin"
+    tag "subworkflows/utils_nfvalidation_plugin"
+
+    test("Should run nothing") {
+
+        when {
+
+            params {
+                monochrome_logs = true
+                test_data       = ''
+            }
+
+            workflow {
+                """
+                help             = false
+                workflow_command = null
+                pre_help_text    = null
+                post_help_text   = null
+                validate_params  = false
+                schema_filename  = "$moduleTestDir/nextflow_schema.json"
+
+                input[0] = help
+                input[1] = workflow_command
+                input[2] = pre_help_text
+                input[3] = post_help_text
+                input[4] = validate_params
+                input[5] = schema_filename
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert workflow.success }
+            )
+        }
+    }
+
+    test("Should run help") {
+
+
+        when {
+
+            params {
+                monochrome_logs = true
+                test_data       = ''
+            }
+            workflow {
+                """
+                help             = true
+                workflow_command = null
+                pre_help_text    = null
+                post_help_text   = null
+                validate_params  = false
+                schema_filename  = "$moduleTestDir/nextflow_schema.json"
+
+                input[0] = help
+                input[1] = workflow_command
+                input[2] = pre_help_text
+                input[3] = post_help_text
+                input[4] = validate_params
+                input[5] = schema_filename
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert workflow.success },
+                { assert workflow.exitStatus == 0 },
+                { assert workflow.stdout.any { it.contains('Input/output options') } },
+                { assert workflow.stdout.any { it.contains('--outdir') } }
+            )
+        }
+    }
+
+    test("Should run help with command") {
+
+        when {
+
+            params {
+                monochrome_logs = true
+                test_data       = ''
+            }
+            workflow {
+                """
+                help             = true
+                workflow_command = "nextflow run noorg/doesntexist"
+                pre_help_text    = null
+                post_help_text   = null
+                validate_params  = false
+                schema_filename  = "$moduleTestDir/nextflow_schema.json"
+
+                input[0] = help
+                input[1] = workflow_command
+                input[2] = pre_help_text
+                input[3] = post_help_text
+                input[4] = validate_params
+                input[5] = schema_filename
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert workflow.success },
+                { assert workflow.exitStatus == 0 },
+                { assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } },
+                { assert workflow.stdout.any { it.contains('Input/output options') } },
+                { assert workflow.stdout.any { it.contains('--outdir') } }
+            )
+        }
+    }
+
+    test("Should run help with extra text") {
+
+
+        when {
+
+            params {
+                monochrome_logs = true
+                test_data       = ''
+            }
+            workflow {
+                """
+                help             = true
+                workflow_command = "nextflow run noorg/doesntexist"
+                pre_help_text    = "pre-help-text"
+                post_help_text   = "post-help-text"
+                validate_params  = false
+                schema_filename  = "$moduleTestDir/nextflow_schema.json"
+
+                input[0] = help
+                input[1] = workflow_command
+                input[2] = pre_help_text
+                input[3] = post_help_text
+                input[4] = validate_params
+                input[5] = schema_filename
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert workflow.success },
+                { assert workflow.exitStatus == 0 },
+                { assert workflow.stdout.any { it.contains('pre-help-text') } },
+                { assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } },
+                { assert workflow.stdout.any { it.contains('Input/output options') } },
+                { assert workflow.stdout.any { it.contains('--outdir') } },
+                { assert workflow.stdout.any { it.contains('post-help-text') } }
+            )
+        }
+    }
+
+    test("Should validate params") {
+
+        when {
+
+            params {
+                monochrome_logs = true
+                test_data       = ''
+                outdir          = 1
+            }
+            workflow {
+                """
+                help             = false
+                workflow_command = null
+                pre_help_text    = null
+                post_help_text   = null
+                validate_params  = true
+                schema_filename  = "$moduleTestDir/nextflow_schema.json"
+
+                input[0] = help
+                input[1] = workflow_command
+                input[2] = pre_help_text
+                input[3] = post_help_text
+                input[4] = validate_params
+                input[5] = schema_filename
+                """
+            }
+        }
+
+        then {
+            assertAll(
+                { assert workflow.failed },
+                { assert workflow.stdout.any { it.contains('ERROR ~ ERROR: Validation of pipeline parameters failed!') } }
+            )
+        }
+    }
+}
diff --git a/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json b/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json
new file mode 100644
index 00000000..7626c1c9
--- /dev/null
+++ b/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json
@@ -0,0 +1,96 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "https://raw.githubusercontent.com/./master/nextflow_schema.json",
+    "title": ". pipeline parameters",
+    "description": "",
+    "type": "object",
+    "definitions": {
+        "input_output_options": {
+            "title": "Input/output options",
+            "type": "object",
+            "fa_icon": "fas fa-terminal",
+            "description": "Define where the pipeline should find input data and save output data.",
+            "required": ["outdir"],
+            "properties": {
+                "validate_params": {
+                    "type": "boolean",
+                    "description": "Validate parameters?",
+                    "default": true,
+                    "hidden": true
+                },
+                "outdir": {
+                    "type": "string",
+                    "format": "directory-path",
+                    "description": "The output directory where the results will be saved. You have to use absolute paths to storage on Cloud infrastructure.",
+                    "fa_icon": "fas fa-folder-open"
+                },
+                "test_data_base": {
+                    "type": "string",
+                    "default": "https://raw.githubusercontent.com/nf-core/test-datasets/modules",
+                    "description": "Base for test data directory",
+                    "hidden": true
+                },
+                "test_data": {
+                    "type": "string",
+                    "description": "Fake test data param",
+                    "hidden": true
+                }
+            }
+        },
+        "generic_options": {
+            "title": "Generic options",
+            "type": "object",
+            "fa_icon": "fas fa-file-import",
+            "description": "Less common options for the pipeline, typically set in a config file.",
+            "help_text": "These options are common to all nf-core pipelines and allow you to customise some of the core preferences for how the pipeline runs.\n\nTypically these options would be set in a Nextflow config file loaded for all pipeline runs, such as `~/.nextflow/config`.",
+            "properties": {
+                "help": {
+                    "type": "boolean",
+                    "description": "Display help text.",
+                    "fa_icon": "fas fa-question-circle",
+                    "hidden": true
+                },
+                "version": {
+                    "type": "boolean",
+                    "description": "Display version and exit.",
+                    "fa_icon": "fas fa-question-circle",
+                    "hidden": true
+                },
+                "logo": {
+                    "type": "boolean",
+                    "default": true,
+                    "description": "Display nf-core logo in console output.",
+                    "fa_icon": "fas fa-image",
+                    "hidden": true
+                },
+                "singularity_pull_docker_container": {
+                    "type": "boolean",
+                    "description": "Pull Singularity container from Docker?",
+                    "hidden": true
+                },
+                "publish_dir_mode": {
+                    "type": "string",
+                    "default": "copy",
+                    "description": "Method used to save pipeline results to output directory.",
+                    "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.",
+                    "fa_icon": "fas fa-copy",
+                    "enum": ["symlink", "rellink", "link", "copy", "copyNoFollow", "move"],
+                    "hidden": true
+                },
+                "monochrome_logs": {
+                    "type": "boolean",
+                    "description": "Use monochrome_logs",
+                    "hidden": true
+                }
+            }
+        }
+    },
+    "allOf": [
+        {
+            "$ref": "#/definitions/input_output_options"
+        },
+        {
+            "$ref": "#/definitions/generic_options"
+        }
+    ]
+}
diff --git a/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml b/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml
new file mode 100644
index 00000000..60b1cfff
--- /dev/null
+++ b/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml
@@ -0,0 +1,2 @@
+subworkflows/utils_nfvalidation_plugin:
+  - subworkflows/nf-core/utils_nfvalidation_plugin/**
diff --git a/workflows/chipseq.nf b/workflows/chipseq.nf
index f3f8e517..cae508d6 100644
--- a/workflows/chipseq.nf
+++ b/workflows/chipseq.nf
@@ -1,61 +1,12 @@
-/*
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-    PRINT PARAMS SUMMARY
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-*/
-
-include { paramsSummaryLog; paramsSummaryMap } from 'plugin/nf-validation'
-
-def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs)
-def citation = '\n' + WorkflowMain.citation(workflow) + '\n'
-def summary_params = paramsSummaryMap(workflow)
-
-// Print parameter summary log to screen
-log.info logo + paramsSummaryLog(workflow) + citation
-
-// Validate input parameters
-WorkflowChipseq.initialise(params, log)
-
-ch_input = file(params.input)
-
-// Save AWS IGenomes file containing annotation version
-def anno_readme = params.genomes[ params.genome ]?.readme
-if (anno_readme && file(anno_readme).exists()) {
-    file("${params.outdir}/genome/").mkdirs()
-    file(anno_readme).copyTo("${params.outdir}/genome/")
-}
-
-/*
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-    CONFIG FILES
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-*/
-
-ch_multiqc_config          = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true)
-ch_multiqc_custom_config   = params.multiqc_config ? Channel.fromPath( params.multiqc_config ): Channel.empty()
-ch_multiqc_logo            = params.multiqc_logo   ? Channel.fromPath( params.multiqc_logo )  : Channel.empty()
-ch_multiqc_custom_methods_description = params.multiqc_methods_description ? file(params.multiqc_methods_description) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true)
-
-// JSON files required by BAMTools for alignment filtering
-ch_bamtools_filter_se_config = file(params.bamtools_filter_se_config)
-ch_bamtools_filter_pe_config = file(params.bamtools_filter_pe_config)
-
-// Header files for MultiQC
-ch_spp_nsc_header           = file("$projectDir/assets/multiqc/spp_nsc_header.txt", checkIfExists: true)
-ch_spp_rsc_header           = file("$projectDir/assets/multiqc/spp_rsc_header.txt", checkIfExists: true)
-ch_spp_correlation_header   = file("$projectDir/assets/multiqc/spp_correlation_header.txt", checkIfExists: true)
-ch_peak_count_header        = file("$projectDir/assets/multiqc/peak_count_header.txt", checkIfExists: true)
-ch_frip_score_header        = file("$projectDir/assets/multiqc/frip_score_header.txt", checkIfExists: true)
-ch_peak_annotation_header   = file("$projectDir/assets/multiqc/peak_annotation_header.txt", checkIfExists: true)
-ch_deseq2_pca_header        = file("$projectDir/assets/multiqc/deseq2_pca_header.txt", checkIfExists: true)
-ch_deseq2_clustering_header = file("$projectDir/assets/multiqc/deseq2_clustering_header.txt", checkIfExists: true)
-
 /*
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     IMPORT LOCAL MODULES/SUBWORKFLOWS
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */
 
+//
+// MODULE: Loaded from modules/local/
+//
 include { BEDTOOLS_GENOMECOV                  } from '../modules/local/bedtools_genomecov'
 include { FRIP_SCORE                          } from '../modules/local/frip_score'
 include { PLOT_MACS2_QC                       } from '../modules/local/plot_macs2_qc'
@@ -71,10 +22,13 @@ include { MULTIQC_CUSTOM_PEAKS                } from '../modules/local/multiqc_c
 //
 // SUBWORKFLOW: Consisting of a mix of local and nf-core/modules
 //
-include { INPUT_CHECK         } from '../subworkflows/local/input_check'
-include { PREPARE_GENOME      } from '../subworkflows/local/prepare_genome'
-include { ALIGN_STAR          } from '../subworkflows/local/align_star'
-include { BAM_FILTER_BAMTOOLS } from '../subworkflows/local/bam_filter_bamtools'
+include { paramsSummaryMap       } from 'plugin/nf-validation'
+include { paramsSummaryMultiqc   } from '../subworkflows/nf-core/utils_nfcore_pipeline'
+include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline'
+include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_chipseq_pipeline'
+include { INPUT_CHECK            } from '../subworkflows/local/input_check'
+include { ALIGN_STAR             } from '../subworkflows/local/align_star'
+include { BAM_FILTER_BAMTOOLS    } from '../subworkflows/local/bam_filter_bamtools'
 
 /*
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -98,7 +52,6 @@ include { DEEPTOOLS_PLOTFINGERPRINT     } from '../modules/nf-core/deeptools/plo
 include { KHMER_UNIQUEKMERS             } from '../modules/nf-core/khmer/uniquekmers/main'
 include { MACS2_CALLPEAK                } from '../modules/nf-core/macs2/callpeak/main'
 include { SUBREAD_FEATURECOUNTS         } from '../modules/nf-core/subread/featurecounts/main'
-include { CUSTOM_DUMPSOFTWAREVERSIONS   } from '../modules/nf-core/custom/dumpsoftwareversions/main'
 
 include { HOMER_ANNOTATEPEAKS as HOMER_ANNOTATEPEAKS_MACS2     } from '../modules/nf-core/homer/annotatepeaks/main'
 include { HOMER_ANNOTATEPEAKS as HOMER_ANNOTATEPEAKS_CONSENSUS } from '../modules/nf-core/homer/annotatepeaks/main'
@@ -119,26 +72,56 @@ include { BAM_MARKDUPLICATES_PICARD        } from '../subworkflows/nf-core/bam_m
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */
 
-// Info required for completion email and summary
-def multiqc_report = []
+// JSON files required by BAMTools for alignment filtering
+ch_bamtools_filter_se_config = file(params.bamtools_filter_se_config)
+ch_bamtools_filter_pe_config = file(params.bamtools_filter_pe_config)
+
+// Header files for MultiQC
+ch_spp_nsc_header           = file("$projectDir/assets/multiqc/spp_nsc_header.txt", checkIfExists: true)
+ch_spp_rsc_header           = file("$projectDir/assets/multiqc/spp_rsc_header.txt", checkIfExists: true)
+ch_spp_correlation_header   = file("$projectDir/assets/multiqc/spp_correlation_header.txt", checkIfExists: true)
+ch_peak_count_header        = file("$projectDir/assets/multiqc/peak_count_header.txt", checkIfExists: true)
+ch_frip_score_header        = file("$projectDir/assets/multiqc/frip_score_header.txt", checkIfExists: true)
+ch_peak_annotation_header   = file("$projectDir/assets/multiqc/peak_annotation_header.txt", checkIfExists: true)
+ch_deseq2_pca_header        = file("$projectDir/assets/multiqc/deseq2_pca_header.txt", checkIfExists: true)
+ch_deseq2_clustering_header = file("$projectDir/assets/multiqc/deseq2_clustering_header.txt", checkIfExists: true)
+
+// Save AWS IGenomes file containing annotation version
+def anno_readme = params.genomes[ params.genome ]?.readme
+if (anno_readme && file(anno_readme).exists()) {
+    file("${params.outdir}/genome/").mkdirs()
+    file(anno_readme).copyTo("${params.outdir}/genome/")
+}
+
+
+// // Info required for completion email and summary
+// def multiqc_report = []
 
 workflow CHIPSEQ {
 
-    ch_versions = Channel.empty()
+    take:
+    ch_samplesheet   // channel: path(sample_sheet.csv)
+    ch_versions      // channel: [ path(versions.yml) ]
+    ch_fasta         // channel: path(genome.fa)
+    ch_fai           // channel: path(genome.fai)
+    ch_gtf           // channel: path(genome.gtf)
+    ch_gene_bed      // channel: path(gene.beds)
+    ch_chrom_sizes   // channel: path(chrom.sizes)
+    ch_filtered_bed  // channel: path(filtered.bed)
+    ch_bwa_index     // channel: path(bwa/index/)
+    ch_bowtie2_index // channel: path(bowtie2/index)
+    ch_chromap_index // channel: path(chromap.index)
+    ch_star_index    // channel: path(star/index/)
 
-    //
-    // SUBWORKFLOW: Uncompress and prepare reference genome files
-    //
-    PREPARE_GENOME (
-        params.aligner
-    )
-    ch_versions = ch_versions.mix(PREPARE_GENOME.out.versions)
+    main:
+    ch_multiqc_files = Channel.empty(); ch_multiqc_report = Channel.empty() // initialise here so the emit is defined even when --skip_multiqc is set
 
     //
     // SUBWORKFLOW: Read in samplesheet, validate and stage input files
     //
+    ch_input = file(ch_samplesheet)
     INPUT_CHECK (
-        file(params.input),
+        ch_input,
         params.seq_center
     )
     ch_versions = ch_versions.mix(INPUT_CHECK.out.versions)
@@ -171,13 +154,11 @@ workflow CHIPSEQ {
     if (params.aligner == 'bwa') {
         FASTQ_ALIGN_BWA (
             FASTQ_FASTQC_UMITOOLS_TRIMGALORE.out.reads,
-            PREPARE_GENOME.out.bwa_index,
+            ch_bwa_index,
             false,
-            PREPARE_GENOME
-                .out
-                .fasta
+            ch_fasta
                 .map {
-                        [ [:], it ]
+                    [ [:], it ]
                 }
         )
         ch_genome_bam        = FASTQ_ALIGN_BWA.out.bam
@@ -194,12 +175,12 @@ workflow CHIPSEQ {
     if (params.aligner == 'bowtie2') {
         FASTQ_ALIGN_BOWTIE2 (
             FASTQ_FASTQC_UMITOOLS_TRIMGALORE.out.reads,
-            PREPARE_GENOME.out.bowtie2_index,
+            ch_bowtie2_index,
             params.save_unaligned,
             false,
-            PREPARE_GENOME.out.fasta
+            ch_fasta
                 .map {
-                        [ [:], it ]
+                    [ [:], it ]
                 }
         )
         ch_genome_bam        = FASTQ_ALIGN_BOWTIE2.out.bam
@@ -216,10 +197,8 @@ workflow CHIPSEQ {
     if (params.aligner == 'chromap') {
         FASTQ_ALIGN_CHROMAP (
             FASTQ_FASTQC_UMITOOLS_TRIMGALORE.out.reads,
-            PREPARE_GENOME.out.chromap_index,
-            PREPARE_GENOME
-                .out
-                .fasta
+            ch_chromap_index,
+            ch_fasta
                 .map {
                     [ [:], it ]
                 },
@@ -242,10 +221,8 @@ workflow CHIPSEQ {
     if (params.aligner == 'star') {
         ALIGN_STAR (
             FASTQ_FASTQC_UMITOOLS_TRIMGALORE.out.reads,
-            PREPARE_GENOME.out.star_index,
-            PREPARE_GENOME
-                .out
-                .fasta
+            ch_star_index,
+            ch_fasta
                 .map {
                         [ [:], it ]
                 },
@@ -290,13 +267,11 @@ workflow CHIPSEQ {
     //
     BAM_MARKDUPLICATES_PICARD (
         PICARD_MERGESAMFILES.out.bam,
-        PREPARE_GENOME
-            .out
-            .fasta
+        ch_fasta
             .map {
-                    [ [:], it ]
+                [ [:], it ]
             },
-        PREPARE_GENOME.out.fai
+        ch_fai
             .map {
                 [ [:], it ]
             }
@@ -308,10 +283,8 @@ workflow CHIPSEQ {
     //
     BAM_FILTER_BAMTOOLS (
         BAM_MARKDUPLICATES_PICARD.out.bam.join(BAM_MARKDUPLICATES_PICARD.out.bai, by: [0]),
-        PREPARE_GENOME.out.filtered_bed.first(),
-        PREPARE_GENOME
-            .out
-            .fasta
+        ch_filtered_bed.first(),
+        ch_fasta
             .map {
                 [ [:], it ]
             },
@@ -344,15 +317,11 @@ workflow CHIPSEQ {
                 .map {
                     [ it[0], it[1], [] ]
                 },
-            PREPARE_GENOME
-                .out
-                .fasta
+            ch_fasta
                 .map {
                     [ [:], it ]
                 },
-            PREPARE_GENOME
-                .out
-                .fai
+            ch_fai
                 .map {
                     [ [:], it ]
                 }
@@ -402,7 +371,7 @@ workflow CHIPSEQ {
     //
     UCSC_BEDGRAPHTOBIGWIG (
         BEDTOOLS_GENOMECOV.out.bedgraph,
-        PREPARE_GENOME.out.chrom_sizes
+        ch_chrom_sizes
     )
     ch_versions = ch_versions.mix(UCSC_BEDGRAPHTOBIGWIG.out.versions.first())
 
@@ -413,7 +382,7 @@ workflow CHIPSEQ {
         //
         DEEPTOOLS_COMPUTEMATRIX (
             UCSC_BEDGRAPHTOBIGWIG.out.bigwig,
-            PREPARE_GENOME.out.gene_bed
+            ch_gene_bed
         )
         ch_versions = ch_versions.mix(DEEPTOOLS_COMPUTEMATRIX.out.versions.first())
 
@@ -475,6 +444,7 @@ workflow CHIPSEQ {
     //
     // MODULE: Calculute genome size with khmer
     //
+    // TODO move to prepare genome
     ch_macs_gsize                     = Channel.empty()
     ch_custompeaks_frip_multiqc       = Channel.empty()
     ch_custompeaks_count_multiqc      = Channel.empty()
@@ -483,7 +453,7 @@ workflow CHIPSEQ {
     ch_macs_gsize = params.macs_gsize
     if (!params.macs_gsize) {
         KHMER_UNIQUEKMERS (
-            PREPARE_GENOME.out.fasta,
+            ch_fasta,
             params.read_length
         )
         ch_macs_gsize = KHMER_UNIQUEKMERS.out.kmers.map { it.text.trim() }
@@ -561,8 +531,8 @@ workflow CHIPSEQ {
         //
         HOMER_ANNOTATEPEAKS_MACS2 (
             ch_macs2_peaks,
-            PREPARE_GENOME.out.fasta,
-            PREPARE_GENOME.out.gtf
+            ch_fasta,
+            ch_gtf
         )
         ch_versions = ch_versions.mix(HOMER_ANNOTATEPEAKS_MACS2.out.versions.first())
 
@@ -640,8 +610,8 @@ workflow CHIPSEQ {
             //
             HOMER_ANNOTATEPEAKS_CONSENSUS (
                 MACS2_CONSENSUS.out.bed,
-                PREPARE_GENOME.out.fasta,
-                PREPARE_GENOME.out.gtf
+                ch_fasta,
+                ch_gtf
             )
             ch_versions = ch_versions.mix(HOMER_ANNOTATEPEAKS_CONSENSUS.out.versions)
 
@@ -708,7 +678,7 @@ workflow CHIPSEQ {
         IGV (
             params.aligner,
             params.narrow_peak ? 'narrow_peak' : 'broad_peak',
-            PREPARE_GENOME.out.fasta,
+            ch_fasta,
             UCSC_BEDGRAPHTOBIGWIG.out.bigwig.collect{it[1]}.ifEmpty([]),
             ch_macs2_peaks.collect{it[1]}.ifEmpty([]),
             ch_macs2_consensus_bed_lib.collect{it[1]}.ifEmpty([]),
@@ -718,29 +688,29 @@ workflow CHIPSEQ {
     }
 
     //
-    // MODULE: Pipeline reporting
+    // Collate and save software versions
     //
-    CUSTOM_DUMPSOFTWAREVERSIONS (
-        ch_versions.unique().collectFile(name: 'collated_versions.yml')
-    )
+    softwareVersionsToYAML(ch_versions)
+        .collectFile(storeDir: "${params.outdir}/pipeline_info", name: 'nf_core_chipseq_software_mqc_versions.yml', sort: true, newLine: true)
+        .set { ch_collated_versions }
 
     //
     // MODULE: MultiQC
     //
     if (!params.skip_multiqc) {
-        workflow_summary    = WorkflowChipseq.paramsSummaryMultiqc(workflow, summary_params)
-        ch_workflow_summary = Channel.value(workflow_summary)
-
-    methods_description    = WorkflowChipseq.methodsDescriptionText(workflow, ch_multiqc_custom_methods_description, params)
-    ch_methods_description = Channel.value(methods_description)
+        ch_multiqc_config        = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true)
+        ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath( params.multiqc_config ): Channel.empty()
+        ch_multiqc_logo          = params.multiqc_logo   ? Channel.fromPath( params.multiqc_logo )  : Channel.empty()
+        summary_params           = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json")
+        ch_workflow_summary      = Channel.value(paramsSummaryMultiqc(summary_params))
+        ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml'))
+        ch_multiqc_files = ch_multiqc_files.mix(ch_collated_versions)
 
         MULTIQC (
-            ch_multiqc_config,
-            ch_multiqc_custom_config.collect().ifEmpty([]),
-            CUSTOM_DUMPSOFTWAREVERSIONS.out.mqc_yml.collect(),
-            ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml'),
-            ch_methods_description.collectFile(name: 'methods_description_mqc.yaml'),
-            ch_multiqc_logo.collect().ifEmpty([]),
+            ch_multiqc_files.collect(),
+            ch_multiqc_config.toList(),
+            ch_multiqc_custom_config.toList(),
+            ch_multiqc_logo.toList(),
 
             FASTQ_FASTQC_UMITOOLS_TRIMGALORE.out.fastqc_zip.collect{it[1]}.ifEmpty([]),
             FASTQ_FASTQC_UMITOOLS_TRIMGALORE.out.trim_zip.collect{it[1]}.ifEmpty([]),
@@ -778,32 +748,12 @@ workflow CHIPSEQ {
             ch_deseq2_pca_multiqc.collect().ifEmpty([]),
             ch_deseq2_clustering_multiqc.collect().ifEmpty([])
         )
-        multiqc_report = MULTIQC.out.report.toList()
+        ch_multiqc_report = MULTIQC.out.report
     }
-}
-
-/*
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-    COMPLETION EMAIL AND SUMMARY
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-*/
 
-workflow.onComplete {
-    if (params.email || params.email_on_fail) {
-        NfcoreTemplate.email(workflow, params, summary_params, projectDir, log, multiqc_report)
-    }
-    NfcoreTemplate.dump_parameters(workflow, params)
-    NfcoreTemplate.summary(workflow, params, log)
-    if (params.hook_url) {
-        NfcoreTemplate.IM_notification(workflow, params, summary_params, projectDir, log)
-    }
-}
-
-workflow.onError {
-    if (workflow.errorReport.contains("Process requirement exceeds available memory")) {
-        println("πŸ›‘ Default resources exceed availability πŸ›‘ ")
-        println("πŸ’‘ See here on how to configure pipeline: https://nf-co.re/docs/usage/configuration#tuning-workflow-resources πŸ’‘")
-    }
+    emit:
+    multiqc_report = ch_multiqc_report  // channel: /path/to/multiqc_report.html
+    versions       = ch_versions       // channel: [ path(versions.yml) ]
 }
 
 /*