diff --git a/CHANGELOG.md b/CHANGELOG.md index 384989da..6dea6662 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # nextflow-io/nf-schema: Changelog +# Version 2.2.0 - Kitakata + +## New features + +1. Added a new configuration option `validation.failUnrecognisedHeaders`. This is the analogue to `failUnrecognisedParams`, but for samplesheet headers. The default is `false`, which means that unrecognised headers only emit a warning instead of an error. +2. Added a new configuration option `validation.summary.hideParams`. This option takes a list of parameter names to hide from the parameters summary created by `paramsSummaryMap()` and `paramsSummaryLog()`. + +## Bug fixes + +1. Fixed a bug in `samplesheetToList` that caused output mixing when the function was used more than once in channel operators. +2. Added a missing dependency for email format validation. +3. All path formats (with the exception of `file-path-pattern`) will now give a proper error message when a file path pattern has been used. + +## Improvements + +1. Improved the `exists` keyword documentation with a warning about an edge case. +2. Updated the error messages. Custom error messages provided in the JSON schema will now be appended to the original error messages instead of overwriting them. + # Version 2.1.2 ## Bug fixes diff --git a/README.md b/README.md index 77c46ea9..bd04471e 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,7 @@ Declare the plugin in your Nextflow pipeline configuration file: ```groovy title="nextflow.config" plugins { - id 'nf-schema@2.1.2' + id 'nf-schema@2.2.0' } ``` diff --git a/docs/configuration/configuration.md b/docs/configuration/configuration.md index 13d72525..2570eb6f 100644 --- a/docs/configuration/configuration.md +++ b/docs/configuration/configuration.md @@ -54,6 +54,14 @@ By default the `validateParameters()` function will only give a warning if an un validation.failUnrecognisedParams = // default: false ``` +## failUnrecognisedHeaders + +By default the `samplesheetToList()` function will only give a warning if an unrecognised header is present in the samplesheet. This usually indicates that a typo has been made and can be easily overlooked when the plugin only emits a warning. You can turn this warning into an error with the `failUnrecognisedHeaders` option. + +```groovy +validation.failUnrecognisedHeaders = // default: false +``` + ## showHiddenParams !!! deprecated @@ -218,3 +226,11 @@ validation.summary.afterText = "Please cite the pipeline owners when using this !!! info All color values (like `\033[0;31m`, which means the color red) will be filtered out when `validation.monochromeLogs` is set to `true` + +### hideParams + +Takes a list of parameter names to exclude from the parameters summary created by `paramsSummaryMap()` and `paramsSummaryLog()`. + +```groovy +validation.summary.hideParams = ["param1", "nested.param"] // default: [] +``` diff --git a/docs/nextflow_schema/nextflow_schema_specification.md b/docs/nextflow_schema/nextflow_schema_specification.md index 70f826ad..f91ddcd5 100644 --- a/docs/nextflow_schema/nextflow_schema_specification.md +++ b/docs/nextflow_schema/nextflow_schema_specification.md @@ -231,7 +231,7 @@ If validation fails, an error message is printed to the terminal, so that the en However, these messages are not always very clear - especially to newcomers. To improve this experience, pipeline developers can set a custom `errorMessage` for a given parameter in a the schema. 
-If validation fails, this `errorMessage` is printed instead, and the raw JSON schema validation message goes to the Nextflow debug log output. +If validation fails, this `errorMessage` is printed after the original error message to guide pipeline users towards a solution. For example, instead of printing: @@ -252,7 +252,7 @@ We can set and get: ``` -* --input (samples.yml): File name must end in '.csv' cannot contain spaces +* --input (samples.yml): "samples.yml" does not match regular expression [^\S+\.csv$] (File name must end in '.csv' cannot contain spaces) ``` ### `deprecated` @@ -389,7 +389,11 @@ Example usage is as follows: !!! note - If the parameter is an S3 URL path, this validation is ignored. + If the parameter is an S3, Azure or Google Cloud URI path, this validation is ignored. + +!!! warning + + Make sure to only use the `exists` keyword in combination with one of the file path formats. Using `exists` on a plain string will cause the value to be treated as a file and will probably fail unexpectedly. ### `mimetype` diff --git a/plugins/nf-schema/build.gradle b/plugins/nf-schema/build.gradle index 3d72ac62..c4ee257c 100644 --- a/plugins/nf-schema/build.gradle +++ b/plugins/nf-schema/build.gradle @@ -56,6 +56,7 @@ dependencies { compileOnly 'org.pf4j:pf4j:3.4.1' implementation 'org.json:json:20240303' implementation 'dev.harrel:json-schema:1.5.0' + implementation 'com.sanctionco.jmail:jmail:1.6.3' // Needed for e-mail format validation // test configuration testImplementation "io.nextflow:nextflow:$nextflowVersion" diff --git a/plugins/nf-schema/src/main/nextflow/validation/CustomEvaluators/FormatDirectoryPathEvaluator.groovy b/plugins/nf-schema/src/main/nextflow/validation/CustomEvaluators/FormatDirectoryPathEvaluator.groovy index 6ed6cbff..3b9d65c9 100644 --- a/plugins/nf-schema/src/main/nextflow/validation/CustomEvaluators/FormatDirectoryPathEvaluator.groovy +++ b/plugins/nf-schema/src/main/nextflow/validation/CustomEvaluators/FormatDirectoryPathEvaluator.groovy @@ -33,6 +33,9 @@ class FormatDirectoryPathEvaluator implements Evaluator { // Actual validation logic def Path file = Nextflow.file(value) as Path + if (file instanceof List) { + return Evaluator.Result.failure("'${value}' is not a directory, but a file path pattern" as String) + } if (file.exists() && !file.isDirectory()) { return Evaluator.Result.failure("'${value}' is not a directory, but a file" as String) } diff --git a/plugins/nf-schema/src/main/nextflow/validation/CustomEvaluators/FormatFilePathEvaluator.groovy b/plugins/nf-schema/src/main/nextflow/validation/CustomEvaluators/FormatFilePathEvaluator.groovy index 3b761537..6bfcebcb 100644 --- a/plugins/nf-schema/src/main/nextflow/validation/CustomEvaluators/FormatFilePathEvaluator.groovy +++ b/plugins/nf-schema/src/main/nextflow/validation/CustomEvaluators/FormatFilePathEvaluator.groovy @@ -33,6 +33,9 @@ class FormatFilePathEvaluator implements Evaluator { // Actual validation logic def Path file = Nextflow.file(value) as Path + if (file instanceof List) { + return Evaluator.Result.failure("'${value}' is not a file, but a file path pattern" as String) + } if (file.exists() && file.isDirectory()) { return Evaluator.Result.failure("'${value}' is not a file, but a directory" as String) } diff --git a/plugins/nf-schema/src/main/nextflow/validation/CustomEvaluators/FormatPathEvaluator.groovy b/plugins/nf-schema/src/main/nextflow/validation/CustomEvaluators/FormatPathEvaluator.groovy index bb51f517..e72945ed 100644 --- 
a/plugins/nf-schema/src/main/nextflow/validation/CustomEvaluators/FormatPathEvaluator.groovy +++ b/plugins/nf-schema/src/main/nextflow/validation/CustomEvaluators/FormatPathEvaluator.groovy @@ -33,6 +33,9 @@ class FormatPathEvaluator implements Evaluator { // Actual validation logic def Path file = Nextflow.file(value) as Path + if (file instanceof List) { + return Evaluator.Result.failure("'${value}' is not a path, but a file path pattern" as String) + } return Evaluator.Result.success() } } \ No newline at end of file diff --git a/plugins/nf-schema/src/main/nextflow/validation/JsonSchemaValidator.groovy b/plugins/nf-schema/src/main/nextflow/validation/JsonSchemaValidator.groovy index dbcd85ab..a0f0d85a 100644 --- a/plugins/nf-schema/src/main/nextflow/validation/JsonSchemaValidator.groovy +++ b/plugins/nf-schema/src/main/nextflow/validation/JsonSchemaValidator.groovy @@ -22,9 +22,9 @@ import java.util.regex.Matcher @CompileStatic public class JsonSchemaValidator { - private static ValidatorFactory validator - private static Pattern uriPattern = Pattern.compile('^#/(\\d*)?/?(.*)$') - private static ValidationConfig config + private ValidatorFactory validator + private Pattern uriPattern = Pattern.compile('^#/(\\d*)?/?(.*)$') + private ValidationConfig config JsonSchemaValidator(ValidationConfig config) { this.validator = new ValidatorFactory() @@ -34,14 +34,14 @@ public class JsonSchemaValidator { this.config = config } - private static List validateObject(JsonNode input, String validationType, Object rawJson, String schemaString) { + private List validateObject(JsonNode input, String validationType, Object rawJson, String schemaString) { def JSONObject schema = new JSONObject(schemaString) def String draft = Utils.getValueFromJson("#/\$schema", schema) if(draft != "https://json-schema.org/draft/2020-12/schema") { log.error("""Failed to load the meta schema: The used schema draft (${draft}) is not correct, please use \"https://json-schema.org/draft/2020-12/schema\" instead. - If you are a pipeline developer, check our migration guide for more information: https://nextflow-io.github.io/nf-schema/latest/migration_guide/ - - If you are a pipeline user, pin the previous version of the plugin (1.1.3) to avoid this error: https://www.nextflow.io/docs/latest/plugins.html#using-plugins, i.e. set `plugins { + - If you are a pipeline user, revert back to nf-validation to avoid this error: https://www.nextflow.io/docs/latest/plugins.html#using-plugins, i.e. 
set `plugins { id 'nf-validation@1.1.3' }` in your `nextflow.config` file """) @@ -50,11 +50,11 @@ public class JsonSchemaValidator { def Validator.Result result = this.validator.validate(schema, input) def List errors = [] - for (error : result.getErrors()) { + result.getErrors().each { error -> def String errorString = error.getError() // Skip double error in the parameter schema if (errorString.startsWith("Value does not match against the schemas at indexes") && validationType == "parameter") { - continue + return } def String instanceLocation = error.getInstanceLocation() @@ -68,48 +68,47 @@ public class JsonSchemaValidator { } // Change some error messages to make them more clear - if (customError == "") { - def String keyword = error.getKeyword() - if (keyword == "required") { - def Matcher matcher = errorString =~ ~/\[\[([^\[\]]*)\]\]$/ - def String missingKeywords = matcher.findAll().flatten().last() - customError = "Missing required ${validationType}(s): ${missingKeywords}" - } + def String keyword = error.getKeyword() + if (keyword == "required") { + def Matcher matcher = errorString =~ ~/\[\[([^\[\]]*)\]\]$/ + def String missingKeywords = matcher.findAll().flatten().last() + errorString = "Missing required ${validationType}(s): ${missingKeywords}" } def List locationList = instanceLocation.split("/").findAll { it != "" } as List + def String printableError = "${validationType == 'field' ? '->' : '*'} ${errorString}" as String if (locationList.size() > 0 && Utils.isInteger(locationList[0]) && validationType == "field") { def Integer entryInteger = locationList[0] as Integer def String entryString = "Entry ${entryInteger + 1}" as String - def String fieldError = "" + def String fieldError = "${errorString}" as String if(locationList.size() > 1) { - fieldError = "Error for ${validationType} '${locationList[1..-1].join("/")}' (${value}): ${customError ?: errorString}" - } else { - fieldError = "${customError ?: errorString}" as String + fieldError = "Error for ${validationType} '${locationList[1..-1].join("/")}' (${value}): ${errorString}" } - errors.add("-> ${entryString}: ${fieldError}" as String) + printableError = "-> ${entryString}: ${fieldError}" as String } else if (validationType == "parameter") { def String fieldName = locationList.join(".") if(fieldName != "") { - errors.add("* --${fieldName} (${value}): ${customError ?: errorString}" as String) - } else { - errors.add("* ${customError ?: errorString}" as String) + printableError = "* --${fieldName} (${value}): ${errorString}" as String } - } else { - errors.add("-> ${customError ?: errorString}" as String) } + if(customError != "") { + printableError = printableError + " (${customError})" + } + + errors.add(printableError) + } return errors } - public static List validate(JSONArray input, String schemaString) { + public List validate(JSONArray input, String schemaString) { def JsonNode jsonInput = new OrgJsonNode.Factory().wrap(input) return this.validateObject(jsonInput, "field", input, schemaString) } - public static List validate(JSONObject input, String schemaString) { + public List validate(JSONObject input, String schemaString) { def JsonNode jsonInput = new OrgJsonNode.Factory().wrap(input) return this.validateObject(jsonInput, "parameter", input, schemaString) } diff --git a/plugins/nf-schema/src/main/nextflow/validation/SamplesheetConverter.groovy b/plugins/nf-schema/src/main/nextflow/validation/SamplesheetConverter.groovy index 7d432b18..010a14d8 100644 --- 
a/plugins/nf-schema/src/main/nextflow/validation/SamplesheetConverter.groovy +++ b/plugins/nf-schema/src/main/nextflow/validation/SamplesheetConverter.groovy @@ -19,82 +19,85 @@ import nextflow.Nextflow @CompileStatic class SamplesheetConverter { - private static Path samplesheetFile - private static Path schemaFile - private static ValidationConfig config - private static Map options - - SamplesheetConverter(Path samplesheetFile, Path schemaFile, ValidationConfig config, Map options) { - this.samplesheetFile = samplesheetFile - this.schemaFile = schemaFile + private ValidationConfig config + + SamplesheetConverter(ValidationConfig config) { this.config = config - this.options = options } - private static List rows = [] - private static Map meta = [:] + private List rows = [] + private Map meta = [:] - private static Map getMeta() { + private Map getMeta() { this.meta } - private static Map resetMeta() { + private Map resetMeta() { this.meta = [:] } - private static addMeta(Map newEntries) { + private addMeta(Map newEntries) { this.meta = this.meta + newEntries } - private static Boolean isMeta() { + private Boolean isMeta() { this.meta.size() > 0 } - private static List unusedHeaders = [] + private List unrecognisedHeaders = [] - private static addUnusedHeader (String header) { - this.unusedHeaders.add(header) + private addUnrecognisedHeader (String header) { + this.unrecognisedHeaders.add(header) } - private static logUnusedHeadersWarning(String fileName) { - def Set unusedHeaders = this.unusedHeaders as Set - if(unusedHeaders.size() > 0) { - def String processedHeaders = unusedHeaders.collect { "\t- ${it}" }.join("\n") - log.warn("Found the following unidentified headers in ${fileName}:\n${processedHeaders}" as String) + private logUnrecognisedHeaders(String fileName) { + def Set unrecognisedHeaders = this.unrecognisedHeaders as Set + if(unrecognisedHeaders.size() > 0) { + def String processedHeaders = unrecognisedHeaders.collect { "\t- ${it}" }.join("\n") + def String msg = "Found the following unidentified headers in ${fileName}:\n${processedHeaders}\n" as String + if( config.failUnrecognisedHeaders ) { + throw new SchemaValidationException(msg) + } else { + log.warn(msg) + } } } /* Convert the samplesheet to a list of entries based on a schema */ - public static List validateAndConvertToList() { + public List validateAndConvertToList( + Path samplesheetFile, + Path schemaFile, + Map options + ) { def colors = Utils.logColours(config.monochromeLogs) // Some checks before validating - if(!this.schemaFile.exists()) { - def msg = "${colors.red}JSON schema file ${this.schemaFile.toString()} does not exist\n${colors.reset}\n" + if(!schemaFile.exists()) { + def msg = "${colors.red}JSON schema file ${schemaFile.toString()} does not exist\n${colors.reset}\n" throw new SchemaValidationException(msg) } - if(!this.samplesheetFile.exists()) { - def msg = "${colors.red}Samplesheet file ${this.samplesheetFile.toString()} does not exist\n${colors.reset}\n" + if(!samplesheetFile.exists()) { + def msg = "${colors.red}Samplesheet file ${samplesheetFile.toString()} does not exist\n${colors.reset}\n" throw new SchemaValidationException(msg) } // Validate final validator = new JsonSchemaValidator(config) - def JSONArray samplesheet = Utils.fileToJsonArray(this.samplesheetFile, this.schemaFile) - def List validationErrors = validator.validate(samplesheet, this.schemaFile.text) + def JSONArray samplesheet = Utils.fileToJsonArray(samplesheetFile, schemaFile) + def List validationErrors = 
validator.validate(samplesheet, schemaFile.text) if (validationErrors) { - def msg = "${colors.red}The following errors have been detected in ${this.samplesheetFile.toString()}:\n\n" + validationErrors.join('\n').trim() + "\n${colors.reset}\n" + def msg = "${colors.red}The following errors have been detected in ${samplesheetFile.toString()}:\n\n" + validationErrors.join('\n').trim() + "\n${colors.reset}\n" log.error("Validation of samplesheet failed!") throw new SchemaValidationException(msg, validationErrors) } // Convert - def LinkedHashMap schemaMap = new JsonSlurper().parseText(this.schemaFile.text) as LinkedHashMap - def List samplesheetList = Utils.fileToList(this.samplesheetFile, this.schemaFile) + def LinkedHashMap schemaMap = new JsonSlurper().parseText(schemaFile.text) as LinkedHashMap + def List samplesheetList = Utils.fileToList(samplesheetFile, schemaFile) this.rows = [] @@ -110,15 +113,18 @@ class SamplesheetConverter { } return result } - logUnusedHeadersWarning(this.samplesheetFile.toString()) + + logUnrecognisedHeaders(samplesheetFile.toString()) + return channelFormat + } /* This function processes an input value based on a schema. The output will be created for addition to the output channel. */ - private static Object formatEntry(Object input, LinkedHashMap schema, String headerPrefix = "") { + private Object formatEntry(Object input, LinkedHashMap schema, String headerPrefix = "") { // Add default values for missing entries input = input != null ? input : schema.containsKey("default") ? schema.default : [] @@ -129,7 +135,7 @@ class SamplesheetConverter { def Set unusedKeys = input.keySet() - properties.keySet() // Check for properties in the samplesheet that have not been defined in the schema - unusedKeys.each{addUnusedHeader("${headerPrefix}${it}" as String)} + unusedKeys.each{addUnrecognisedHeader("${headerPrefix}${it}" as String)} // Loop over every property to maintain the correct order properties.each { property, schemaValues -> @@ -166,15 +172,15 @@ class SamplesheetConverter { } - private static List validPathFormats = ["file-path", "path", "directory-path", "file-path-pattern"] - private static List schemaOptions = ["anyOf", "oneOf", "allOf"] + private List validPathFormats = ["file-path", "path", "directory-path", "file-path-pattern"] + private List schemaOptions = ["anyOf", "oneOf", "allOf"] /* This function processes a value that's not a map or list and casts it to a file type if necessary. When there is uncertainty if the value should be a path, some simple logic is applied that tries to guess if it should be a file type */ - private static Object processValue(Object value, Map schemaEntry) { + private Object processValue(Object value, Map schemaEntry) { if(!(value instanceof String)) { return value } @@ -228,7 +234,7 @@ class SamplesheetConverter { This function processes an input value based on a schema. The output will be created for addition to the meta map. */ - private static Object processMeta(Object input, LinkedHashMap schema, String headerPrefix) { + private Object processMeta(Object input, LinkedHashMap schema, String headerPrefix) { // Add default values for missing entries input = input != null ? input : schema.containsKey("default") ? 
schema.default : [] @@ -238,7 +244,7 @@ class SamplesheetConverter { def Set unusedKeys = input.keySet() - properties.keySet() // Check for properties in the samplesheet that have not been defined in the schema - unusedKeys.each{addUnusedHeader("${headerPrefix}${it}" as String)} + unusedKeys.each{addUnrecognisedHeader("${headerPrefix}${it}" as String)} // Loop over every property to maintain the correct order properties.each { property, schemaValues -> diff --git a/plugins/nf-schema/src/main/nextflow/validation/SchemaValidator.groovy b/plugins/nf-schema/src/main/nextflow/validation/SchemaValidator.groovy index 61aa1bbc..fd8e0305 100644 --- a/plugins/nf-schema/src/main/nextflow/validation/SchemaValidator.groovy +++ b/plugins/nf-schema/src/main/nextflow/validation/SchemaValidator.groovy @@ -40,7 +40,7 @@ import org.yaml.snakeyaml.Yaml @CompileStatic class SchemaValidator extends PluginExtensionPoint { - static final List NF_OPTIONS = [ + final List NF_OPTIONS = [ // Options for base `nextflow` command 'bg', 'c', @@ -224,8 +224,9 @@ class SchemaValidator extends PluginExtensionPoint { final Path schema, final Map options = null ) { - def SamplesheetConverter converter = new SamplesheetConverter(samplesheet, schema, config, options) - return converter.validateAndConvertToList() + def SamplesheetConverter converter = new SamplesheetConverter(config) + def List output = converter.validateAndConvertToList(samplesheet, schema, options) + return output } // @@ -431,6 +432,19 @@ Please contact the pipeline maintainer(s) if you see this warning as a user. def Map paramsMap = Utils.paramsLoad( Path.of(Utils.getSchemaPath(session.baseDir.toString(), schemaFilename)) ) for (group in paramsMap.keySet()) { def Map groupSummary = getSummaryMapFromParams(params, paramsMap.get(group) as Map) + config.summary.hideParams.each { hideParam -> + def List hideParamList = hideParam.tokenize(".") as List + def Integer indexCounter = 0 + def Map nestedSummary = groupSummary + if(hideParamList.size() >= 2 ) { + hideParamList[0..-2].each { it -> + nestedSummary = nestedSummary?.get(it, null) + } + } + if(nestedSummary != null ) { + nestedSummary.remove(hideParamList[-1]) + } + } paramsSummary.put(group, groupSummary) } paramsSummary.put('Core Nextflow options', workflowSummary) @@ -542,7 +556,7 @@ Please contact the pipeline maintainer(s) if you see this warning as a user. 
// // Clean and check parameters relative to Nextflow native classes // - private static Map cleanParameters(Map params) { + private Map cleanParameters(Map params) { def Map new_params = (Map) params.getClass().newInstance(params) for (p in params) { // remove anything evaluating to false diff --git a/plugins/nf-schema/src/main/nextflow/validation/config/SummaryConfig.groovy b/plugins/nf-schema/src/main/nextflow/validation/config/SummaryConfig.groovy index 9b177864..451c8fcd 100644 --- a/plugins/nf-schema/src/main/nextflow/validation/config/SummaryConfig.groovy +++ b/plugins/nf-schema/src/main/nextflow/validation/config/SummaryConfig.groovy @@ -19,6 +19,7 @@ import groovy.transform.PackageScope class SummaryConfig { final public String beforeText final public String afterText + final public List hideParams SummaryConfig(Map map, Boolean monochromeLogs) { def config = map ?: Collections.emptyMap() @@ -29,5 +30,6 @@ class SummaryConfig { beforeText = config.beforeText ?: "" afterText = config.afterText ?: "" } + this.hideParams = config.hideParams ?: [] } } \ No newline at end of file diff --git a/plugins/nf-schema/src/main/nextflow/validation/config/ValidationConfig.groovy b/plugins/nf-schema/src/main/nextflow/validation/config/ValidationConfig.groovy index 172eb23e..bad520d9 100644 --- a/plugins/nf-schema/src/main/nextflow/validation/config/ValidationConfig.groovy +++ b/plugins/nf-schema/src/main/nextflow/validation/config/ValidationConfig.groovy @@ -21,6 +21,7 @@ class ValidationConfig { final public Boolean lenientMode final public Boolean monochromeLogs final public Boolean failUnrecognisedParams + final public Boolean failUnrecognisedHeaders final public String parametersSchema final public Boolean showHiddenParams final public HelpConfig help @@ -30,10 +31,11 @@ class ValidationConfig { ValidationConfig(Map map, Map params){ def config = map ?: Collections.emptyMap() - lenientMode = config.lenientMode ?: false - monochromeLogs = config.monochromeLogs ?: false - failUnrecognisedParams = config.failUnrecognisedParams ?: false - showHiddenParams = config.showHiddenParams ?: false + lenientMode = config.lenientMode ?: false + monochromeLogs = config.monochromeLogs ?: false + failUnrecognisedParams = config.failUnrecognisedParams ?: false + failUnrecognisedHeaders = config.failUnrecognisedHeaders ?: false + showHiddenParams = config.showHiddenParams ?: false if(config.containsKey("showHiddenParams")) { log.warn("configuration option `validation.showHiddenParams` is deprecated, please use `validation.help.showHidden` or the `--showHidden` parameter instead") } diff --git a/plugins/nf-schema/src/resources/META-INF/MANIFEST.MF b/plugins/nf-schema/src/resources/META-INF/MANIFEST.MF index cd4b1971..a946f05a 100644 --- a/plugins/nf-schema/src/resources/META-INF/MANIFEST.MF +++ b/plugins/nf-schema/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 Plugin-Id: nf-schema -Plugin-Version: 2.1.2 +Plugin-Version: 2.2.0 Plugin-Class: nextflow.validation.ValidationPlugin Plugin-Provider: nextflow Plugin-Requires: >=23.10.0 diff --git a/plugins/nf-schema/src/test/nextflow/validation/ParamsSummaryLogTest.groovy b/plugins/nf-schema/src/test/nextflow/validation/ParamsSummaryLogTest.groovy index 61ef1798..7c7b33c1 100644 --- a/plugins/nf-schema/src/test/nextflow/validation/ParamsSummaryLogTest.groovy +++ b/plugins/nf-schema/src/test/nextflow/validation/ParamsSummaryLogTest.groovy @@ -184,4 +184,96 @@ class ParamsSummaryLogTest extends Dsl2Spec{ stdout.size() == 13 stdout ==~ 
/.*\[0;34moutdir : .\[0;32moutDir.*/ } + + def 'should print params summary - nested parameters - hide params' () { + given: + def schema = Path.of('src/testResources/nextflow_schema_nested_parameters.json').toAbsolutePath().toString() + def SCRIPT = """ + params.this.is.so.deep = "changed_value" + include { paramsSummaryLog } from 'plugin/nf-schema' + + def summary_params = paramsSummaryLog(workflow, parameters_schema: '$schema') + log.info summary_params + """ + + when: + def config = [ + "params": [ + "this": [ + "is": [ + "so": [ + "deep": true + ] + ] + ] + ], + "validation": [ + "summary": [ + "hideParams": ["params.this.is.so.deep"] + ] + ] + ] + def result = new MockScriptRunner(config).setScript(SCRIPT).execute() + def stdout = capture + .toString() + .readLines() + .findResults {it.contains('Only displaying parameters that differ from the pipeline defaults') || + it.contains('Core Nextflow options') || + it.contains('runName') || + it.contains('launchDir') || + it.contains('workDir') || + it.contains('projectDir') || + it.contains('userName') || + it.contains('profile') || + it.contains('configFiles') || + it.contains('Nested Parameters') || + it.contains('this.is.so.deep ') + ? it : null } + + then: + noExceptionThrown() + stdout.size() == 10 + stdout !=~ /.*\[0;34mthis.is.so.deep: .\[0;32mchanged_value.*/ + } + + def 'should print params summary - hide params' () { + given: + def schema = Path.of('src/testResources/nextflow_schema.json').toAbsolutePath().toString() + def SCRIPT = """ + params.outdir = "outDir" + include { paramsSummaryLog } from 'plugin/nf-schema' + + def summary_params = paramsSummaryLog(workflow, parameters_schema: '$schema') + log.info summary_params + """ + + when: + def config = [ + "validation": [ + "summary": [ + "hideParams": ["outdir"] + ] + ] + ] + def result = new MockScriptRunner(config).setScript(SCRIPT).execute() + def stdout = capture + .toString() + .readLines() + .findResults {it.contains('Only displaying parameters that differ from the pipeline defaults') || + it.contains('Core Nextflow options') || + it.contains('runName') || + it.contains('launchDir') || + it.contains('workDir') || + it.contains('projectDir') || + it.contains('userName') || + it.contains('profile') || + it.contains('configFiles') || + it.contains('outdir ') + ? 
it : null } + + then: + noExceptionThrown() + stdout.size() == 9 + stdout !=~ /.*\[0;34moutdir : .\[0;32moutDir.*/ + } } \ No newline at end of file diff --git a/plugins/nf-schema/src/test/nextflow/validation/SamplesheetConverterTest.groovy b/plugins/nf-schema/src/test/nextflow/validation/SamplesheetConverterTest.groovy index c2ed023f..ee09992c 100644 --- a/plugins/nf-schema/src/test/nextflow/validation/SamplesheetConverterTest.groovy +++ b/plugins/nf-schema/src/test/nextflow/validation/SamplesheetConverterTest.groovy @@ -11,6 +11,7 @@ import org.pf4j.PluginDescriptorFinder import spock.lang.Shared import test.Dsl2Spec import test.OutputCapture +import test.MockScriptRunner /** * @author : mirpedrol @@ -337,7 +338,7 @@ class SamplesheetConverterTest extends Dsl2Spec{ def 'extra field' () { given: - def SCRIPT_TEXT = ''' + def SCRIPT = ''' include { samplesheetToList } from 'plugin/nf-schema' params.input = "src/testResources/extraFields.csv" @@ -350,7 +351,8 @@ class SamplesheetConverterTest extends Dsl2Spec{ ''' when: - dsl_eval(SCRIPT_TEXT) + def config = [:] + new MockScriptRunner(config).setScript(SCRIPT).execute() def stdout = capture .toString() .readLines() @@ -368,6 +370,35 @@ class SamplesheetConverterTest extends Dsl2Spec{ stdout.contains("[[string1:extraField, string2:extraField, integer1:10, integer2:10, boolean1:true, boolean2:true], string1, 25, false, ${getRootString()}/src/testResources/test.txt, ${getRootString()}/src/testResources/testDir, [], unique3, 1, itDoesExist]" as String) } + def 'extra field - fail' () { + given: + def SCRIPT = ''' + include { samplesheetToList } from 'plugin/nf-schema' + + params.input = "src/testResources/extraFields.csv" + params.schema = "src/testResources/schema_input.json" + + workflow { + Channel.fromList(samplesheetToList(params.input, params.schema)) + .view() + } + ''' + + when: + def config = [ + "validation": [ + "failUnrecognisedHeaders": true + ] + ] + new MockScriptRunner(config).setScript(SCRIPT).execute() + + then: + def error = thrown(SchemaValidationException) + error.message == """Found the following unidentified headers in ${getRootString()}/src/testResources/extraFields.csv: +\t- extraField +""" as String + } + def 'no meta' () { given: def SCRIPT_TEXT = ''' @@ -537,4 +568,42 @@ class SamplesheetConverterTest extends Dsl2Spec{ stdout.contains("[[string1:dependentRequired, string2:dependentRequired, integer1:10, integer2:10, boolean1:true, boolean2:true], string1, 25, false, [], [], [], unique2, 1, itDoesExist]") stdout.contains("[[string1:extraField, string2:extraField, integer1:10, integer2:10, boolean1:true, boolean2:true], string1, 25, false, ${getRootString()}/src/testResources/test.txt, ${getRootString()}/src/testResources/testDir, ${getRootString()}/src/testResources/testDir, unique3, 1, itDoesExist]" as String) } + + def 'samplesheetToList - usage in channels' () { + given: + def SCRIPT_TEXT = ''' + include { samplesheetToList } from 'plugin/nf-schema' + + Channel.of("src/testResources/correct.csv") + .flatMap { it -> + samplesheetToList(it, "src/testResources/schema_input.json") + } + .map { it -> println("first: ${it}") } + + Channel.of("src/testResources/correct_arrays.json") + .flatMap { it -> + samplesheetToList(it, "src/testResources/schema_input_with_arrays.json") + } + .map { it -> println("second: ${it}") } + + ''' + + when: + dsl_eval(SCRIPT_TEXT) + def stdout = capture + .toString() + .readLines() + .findResults {it.startsWith('first') || it.startsWith('second') ? 
it : null } + + then: + noExceptionThrown() + stdout.contains("first: [[string1:fullField, string2:fullField, integer1:10, integer2:10, boolean1:true, boolean2:true], string1, 25.12, false, ${getRootString()}/src/testResources/test.txt, ${getRootString()}/src/testResources/testDir, ${getRootString()}/src/testResources/test.txt, unique1, 1, itDoesExist]" as String) + stdout.contains("first: [[string1:value, string2:value, integer1:0, integer2:0, boolean1:true, boolean2:true], string1, 25.08, false, [], [], [], [], [], itDoesExist]") + stdout.contains("first: [[string1:dependentRequired, string2:dependentRequired, integer1:10, integer2:10, boolean1:true, boolean2:true], string1, 25, false, [], [], [], unique2, 1, itDoesExist]") + stdout.contains("first: [[string1:extraField, string2:extraField, integer1:10, integer2:10, boolean1:true, boolean2:true], string1, 25, false, ${getRootString()}/src/testResources/test.txt, ${getRootString()}/src/testResources/testDir, ${getRootString()}/src/testResources/testDir, unique3, 1, itDoesExist]" as String) + stdout.contains("second: [[array_meta:[]], [${getRootString()}/src/testResources/testDir/testFile.txt, ${getRootString()}/src/testResources/testDir2/testFile2.txt], [${getRootString()}/src/testResources/testDir, ${getRootString()}/src/testResources/testDir2], [${getRootString()}/src/testResources/testDir, ${getRootString()}/src/testResources/testDir2/testFile2.txt], [string1, string2], [25, 26], [25, 26.5], [false, true], [1, 2, 3], [true], [${getRootString()}/src/testResources/testDir/testFile.txt], [[${getRootString()}/src/testResources/testDir/testFile.txt]]]" as String) + stdout.contains("second: [[array_meta:[look, an, array, in, meta]], [], [], [], [string1, string2], [25, 26], [25, 26.5], [], [1, 2, 3], [false, true, false], [${getRootString()}/src/testResources/testDir/testFile.txt], [[${getRootString()}/src/testResources/testDir/testFile.txt]]]" as String) + stdout.contains("second: [[array_meta:[]], [], [], [], [string1, string2], [25, 26], [25, 26.5], [], [1, 2, 3], [false, true, false], [${getRootString()}/src/testResources/testDir/testFile.txt], [[${getRootString()}/src/testResources/testDir/testFile.txt], [${getRootString()}/src/testResources/testDir/testFile.txt, ${getRootString()}/src/testResources/testDir2/testFile2.txt]]]" as String) + + } } diff --git a/plugins/nf-schema/src/test/nextflow/validation/ValidateParametersTest.groovy b/plugins/nf-schema/src/test/nextflow/validation/ValidateParametersTest.groovy index 8636ac07..2591e355 100644 --- a/plugins/nf-schema/src/test/nextflow/validation/ValidateParametersTest.groovy +++ b/plugins/nf-schema/src/test/nextflow/validation/ValidateParametersTest.groovy @@ -238,12 +238,12 @@ class ValidateParametersTest extends Dsl2Spec{ errorMessages[0] == "\033[0;31mThe following invalid input values have been detected:" errorMessages[1] == "" errorMessages[2] == "* --input (src/testResources/wrong.csv): Validation of file failed:" - errorMessages[3] == "\t-> Entry 1: Error for field 'strandedness' (weird): Strandedness must be provided and be one of 'forward', 'reverse' or 'unstranded'" + errorMessages[3] == "\t-> Entry 1: Error for field 'strandedness' (weird): Expected any of [[forward, reverse, unstranded]] (Strandedness must be provided and be one of 'forward', 'reverse' or 'unstranded')" errorMessages[4] == "\t-> Entry 1: Error for field 'fastq_2' (test1_fastq2.fasta): \"test1_fastq2.fasta\" does not match regular expression [^\\S+\\.f(ast)?q\\.gz\$]" errorMessages[5] == "\t-> Entry 1: Error 
for field 'fastq_2' (test1_fastq2.fasta): \"test1_fastq2.fasta\" is longer than 0 characters" - errorMessages[6] == "\t-> Entry 1: Error for field 'fastq_2' (test1_fastq2.fasta): FastQ file for reads 2 cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz'" + errorMessages[6] == "\t-> Entry 1: Error for field 'fastq_2' (test1_fastq2.fasta): Value does not match against any of the schemas (FastQ file for reads 2 cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz')" errorMessages[7] == "\t-> Entry 1: Missing required field(s): sample" - errorMessages[8] == "\t-> Entry 2: Error for field 'sample' (test 2): Sample name must be provided and cannot contain spaces" + errorMessages[8] == "\t-> Entry 2: Error for field 'sample' (test 2): \"test 2\" does not match regular expression [^\\S+\$] (Sample name must be provided and cannot contain spaces)" !stdout } @@ -272,12 +272,12 @@ class ValidateParametersTest extends Dsl2Spec{ errorMessages[0] == "\033[0;31mThe following invalid input values have been detected:" errorMessages[1] == "" errorMessages[2] == "* --input (src/testResources/wrong.tsv): Validation of file failed:" - errorMessages[3] == "\t-> Entry 1: Error for field 'strandedness' (weird): Strandedness must be provided and be one of 'forward', 'reverse' or 'unstranded'" + errorMessages[3] == "\t-> Entry 1: Error for field 'strandedness' (weird): Expected any of [[forward, reverse, unstranded]] (Strandedness must be provided and be one of 'forward', 'reverse' or 'unstranded')" errorMessages[4] == "\t-> Entry 1: Error for field 'fastq_2' (test1_fastq2.fasta): \"test1_fastq2.fasta\" does not match regular expression [^\\S+\\.f(ast)?q\\.gz\$]" errorMessages[5] == "\t-> Entry 1: Error for field 'fastq_2' (test1_fastq2.fasta): \"test1_fastq2.fasta\" is longer than 0 characters" - errorMessages[6] == "\t-> Entry 1: Error for field 'fastq_2' (test1_fastq2.fasta): FastQ file for reads 2 cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz'" + errorMessages[6] == "\t-> Entry 1: Error for field 'fastq_2' (test1_fastq2.fasta): Value does not match against any of the schemas (FastQ file for reads 2 cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz')" errorMessages[7] == "\t-> Entry 1: Missing required field(s): sample" - errorMessages[8] == "\t-> Entry 2: Error for field 'sample' (test 2): Sample name must be provided and cannot contain spaces" + errorMessages[8] == "\t-> Entry 2: Error for field 'sample' (test 2): \"test 2\" does not match regular expression [^\\S+\$] (Sample name must be provided and cannot contain spaces)" !stdout } @@ -306,12 +306,12 @@ class ValidateParametersTest extends Dsl2Spec{ errorMessages[0] == "\033[0;31mThe following invalid input values have been detected:" errorMessages[1] == "" errorMessages[2] == "* --input (src/testResources/wrong.yaml): Validation of file failed:" - errorMessages[3] == "\t-> Entry 1: Error for field 'strandedness' (weird): Strandedness must be provided and be one of 'forward', 'reverse' or 'unstranded'" + errorMessages[3] == "\t-> Entry 1: Error for field 'strandedness' (weird): Expected any of [[forward, reverse, unstranded]] (Strandedness must be provided and be one of 'forward', 'reverse' or 'unstranded')" errorMessages[4] == "\t-> Entry 1: Error for field 'fastq_2' (test1_fastq2.fasta): \"test1_fastq2.fasta\" does not match regular expression [^\\S+\\.f(ast)?q\\.gz\$]" errorMessages[5] == "\t-> Entry 1: Error for field 'fastq_2' (test1_fastq2.fasta): \"test1_fastq2.fasta\" is 
longer than 0 characters" - errorMessages[6] == "\t-> Entry 1: Error for field 'fastq_2' (test1_fastq2.fasta): FastQ file for reads 2 cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz'" + errorMessages[6] == "\t-> Entry 1: Error for field 'fastq_2' (test1_fastq2.fasta): Value does not match against any of the schemas (FastQ file for reads 2 cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz')" errorMessages[7] == "\t-> Entry 1: Missing required field(s): sample" - errorMessages[8] == "\t-> Entry 2: Error for field 'sample' (test 2): Sample name must be provided and cannot contain spaces" + errorMessages[8] == "\t-> Entry 2: Error for field 'sample' (test 2): \"test 2\" does not match regular expression [^\\S+\$] (Sample name must be provided and cannot contain spaces)" !stdout } @@ -340,12 +340,12 @@ class ValidateParametersTest extends Dsl2Spec{ errorMessages[0] == "\033[0;31mThe following invalid input values have been detected:" errorMessages[1] == "" errorMessages[2] == "* --input (src/testResources/wrong.json): Validation of file failed:" - errorMessages[3] == "\t-> Entry 1: Error for field 'strandedness' (weird): Strandedness must be provided and be one of 'forward', 'reverse' or 'unstranded'" + errorMessages[3] == "\t-> Entry 1: Error for field 'strandedness' (weird): Expected any of [[forward, reverse, unstranded]] (Strandedness must be provided and be one of 'forward', 'reverse' or 'unstranded')" errorMessages[4] == "\t-> Entry 1: Error for field 'fastq_2' (test1_fastq2.fasta): \"test1_fastq2.fasta\" does not match regular expression [^\\S+\\.f(ast)?q\\.gz\$]" errorMessages[5] == "\t-> Entry 1: Error for field 'fastq_2' (test1_fastq2.fasta): \"test1_fastq2.fasta\" is longer than 0 characters" - errorMessages[6] == "\t-> Entry 1: Error for field 'fastq_2' (test1_fastq2.fasta): FastQ file for reads 2 cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz'" + errorMessages[6] == "\t-> Entry 1: Error for field 'fastq_2' (test1_fastq2.fasta): Value does not match against any of the schemas (FastQ file for reads 2 cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz')" errorMessages[7] == "\t-> Entry 1: Missing required field(s): sample" - errorMessages[8] == "\t-> Entry 2: Error for field 'sample' (test 2): Sample name must be provided and cannot contain spaces" + errorMessages[8] == "\t-> Entry 2: Error for field 'sample' (test 2): \"test 2\" does not match regular expression [^\\S+\$] (Sample name must be provided and cannot contain spaces)" !stdout } @@ -868,13 +868,11 @@ class ValidateParametersTest extends Dsl2Spec{ then: def error = thrown(SchemaValidationException) - error.message == """The following invalid input values have been detected: - -* --input (src/testResources/samplesheet_wrong_pattern.csv): Validation of file failed: -\t-> Entry 1: Error for field 'fastq_1' (test1_fastq1.txt): FastQ file for reads 1 must be provided, cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz' -\t-> Entry 2: Error for field 'fastq_1' (test2_fastq1.txt): FastQ file for reads 1 must be provided, cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz' - -""" + def errorMessage = error.message.tokenize("\n") + errorMessage[0] == "The following invalid input values have been detected:" + errorMessage[1] == "* --input (src/testResources/samplesheet_wrong_pattern.csv): Validation of file failed:" + errorMessage[2] == "\t-> Entry 1: Error for field 'fastq_1' (test1_fastq1.txt): \"test1_fastq1.txt\" 
does not match regular expression [^\\S+\\.f(ast)?q\\.gz\$] (FastQ file for reads 1 must be provided, cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz')" + errorMessage[3] == "\t-> Entry 2: Error for field 'fastq_1' (test2_fastq1.txt): \"test2_fastq1.txt\" does not match regular expression [^\\S+\\.f(ast)?q\\.gz\$] (FastQ file for reads 1 must be provided, cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz')" !stdout } @@ -1095,4 +1093,108 @@ class ValidateParametersTest extends Dsl2Spec{ !stdout } + def 'should validate an email' () { + given: + def schema = Path.of('src/testResources/nextflow_schema.json').toAbsolutePath().toString() + def SCRIPT = """ + params.input = 'src/testResources/samplesheet.csv' + params.outdir = 'src/testResources/testDir' + params.email = "test@domain.com" + include { validateParameters } from 'plugin/nf-schema' + + validateParameters(parameters_schema: '$schema') + """ + + when: + def config = [:] + def result = new MockScriptRunner(config).setScript(SCRIPT).execute() + def stdout = capture + .toString() + .readLines() + .findResults {it.contains('WARN nextflow.validation.SchemaValidator') || it.startsWith('* --') ? it : null } + + then: + noExceptionThrown() + !stdout + } + + def 'should validate an email - failure' () { + given: + def schema = Path.of('src/testResources/nextflow_schema.json').toAbsolutePath().toString() + def SCRIPT = """ + params.input = 'src/testResources/samplesheet.csv' + params.outdir = 'src/testResources/testDir' + params.email = "thisisnotanemail" + include { validateParameters } from 'plugin/nf-schema' + + validateParameters(parameters_schema: '$schema') + """ + + when: + def config = [:] + def result = new MockScriptRunner(config).setScript(SCRIPT).execute() + def stdout = capture + .toString() + .readLines() + .findResults {it.contains('WARN nextflow.validation.SchemaValidator') || it.startsWith('* --') ? it : null } + + + then: + def error = thrown(SchemaValidationException) + error.message.contains("* --email (thisisnotanemail): \"thisisnotanemail\" is not a valid email address") + !stdout + } + + def 'should give an error when a file-path-pattern is used with a file-path format' () { + given: + def schema = Path.of('src/testResources/nextflow_schema.json').toAbsolutePath().toString() + def SCRIPT = """ + params.input = 'src/testResources/*.csv' + params.outdir = 'src/testResources/testDir' + include { validateParameters } from 'plugin/nf-schema' + + validateParameters(parameters_schema: '$schema') + """ + + when: + def config = [:] + def result = new MockScriptRunner(config).setScript(SCRIPT).execute() + def stdout = capture + .toString() + .readLines() + .findResults {it.contains('WARN nextflow.validation.SchemaValidator') || it.startsWith('* --') ? 
it : null } + + + then: + def error = thrown(SchemaValidationException) + error.message.contains("* --input (src/testResources/*.csv): 'src/testResources/*.csv' is not a file, but a file path pattern") + !stdout + } + + def 'should give an error when a file-path-pattern is used with a directory-path format' () { + given: + def schema = Path.of('src/testResources/nextflow_schema.json').toAbsolutePath().toString() + def SCRIPT = """ + params.input = 'src/testResources/samplesheet.csv' + params.outdir = 'src/testResources/testDi*' + include { validateParameters } from 'plugin/nf-schema' + + validateParameters(parameters_schema: '$schema') + """ + + when: + def config = [:] + def result = new MockScriptRunner(config).setScript(SCRIPT).execute() + def stdout = capture + .toString() + .readLines() + .findResults {it.contains('WARN nextflow.validation.SchemaValidator') || it.startsWith('* --') ? it : null } + + + then: + def error = thrown(SchemaValidationException) + error.message.contains("* --outdir (src/testResources/testDi*): 'src/testResources/testDi*' is not a directory, but a file path pattern") + !stdout + } + } \ No newline at end of file diff --git a/plugins/nf-schema/src/testResources/nextflow_schema.json b/plugins/nf-schema/src/testResources/nextflow_schema.json index 7dcf103b..1fb1565b 100644 --- a/plugins/nf-schema/src/testResources/nextflow_schema.json +++ b/plugins/nf-schema/src/testResources/nextflow_schema.json @@ -30,9 +30,9 @@ "email": { "type": "string", "description": "Email address for completion summary.", + "format": "email", "fa_icon": "fas fa-envelope", - "help_text": "Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits. If set in your user config file (`~/.nextflow/config`) then you don't need to specify this on the command line for every run.", - "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$" + "help_text": "Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits. If set in your user config file (`~/.nextflow/config`) then you don't need to specify this on the command line for every run." }, "multiqc_title": { "type": "string",