From 420c75332c7264a9f25165b9f9ef96c0f6201fb3 Mon Sep 17 00:00:00 2001 From: GlassOfWhiskey Date: Tue, 9 Sep 2025 18:01:49 +0200 Subject: [PATCH] Prepare for CWL v1.3 This commit generates a `cwl_v1_3` parser with Schema SALAD, updates the utility functions to correctly handle the new version, and adds some tests to confirm that the new code behaves correctly. --- README.rst | 7 +- cwl_utils/cwl_v1_3_expression_refactor.py | 2254 ++ cwl_utils/parser/__init__.py | 165 +- cwl_utils/parser/cwl_v1_1_utils.py | 37 +- cwl_utils/parser/cwl_v1_2_utils.py | 37 +- cwl_utils/parser/cwl_v1_3.py | 30425 ++++++++++++++++ cwl_utils/parser/cwl_v1_3_utils.py | 668 + cwl_utils/parser/utils.py | 64 + testdata/all-output-loop_v1_3.cwl | 28 + ...l => cond-single-source-wf-003.1.v1_2.cwl} | 2 +- testdata/cond-single-source-wf-003.1.v1_3.cwl | 24 + ...l => cond-single-source-wf-004.1.v1_2.cwl} | 2 +- testdata/cond-single-source-wf-004.1.v1_3.cwl | 24 + ...l => cond-single-source-wf-005.1.v1_2.cwl} | 2 +- testdata/cond-single-source-wf-005.1.v1_3.cwl | 24 + ...nd-wf-003.1.cwl => cond-wf-003.1.v1_2.cwl} | 0 testdata/cond-wf-003.1.v1_3.cwl | 35 + ...nd-wf-004.1.cwl => cond-wf-004.1.v1_2.cwl} | 0 testdata/cond-wf-004.1.v1_3.cwl | 35 + ...nd-wf-005.1.cwl => cond-wf-005.1.v1_2.cwl} | 0 testdata/cond-wf-005.1.v1_3.cwl | 35 + .../count-lines6-single-source-wf_v1_3.cwl | 24 + testdata/count-lines6-wf_v1_3.cwl | 26 + .../count-lines7-single-source-wf_v1_3.cwl | 20 + testdata/count-lines7-wf_v1_3.cwl | 24 + testdata/echo_v1_3.cwl | 14 + .../{foo-array.cwl => foo-array.v1_2.cwl} | 0 testdata/foo-array.v1_3.cwl | 13 + .../{formattest2.cwl => formattest2_v1_2.cwl} | 0 testdata/formattest2_v1_3.cwl | 27 + testdata/map-ordering-v1_3.cwl | 38 + testdata/record-output-wf_v1_3.cwl | 33 + testdata/record-output_v1_3.cwl | 39 + testdata/scatter-wf1_v1_3.cwl | 37 + testdata/scatter-wf2_v1_3.cwl | 51 + testdata/scatter-wf3_v1_3.cwl | 48 + testdata/single-var-loop_v1_3.cwl | 28 + testdata/stdout-wf_v1_3.cwl | 
20 + testdata/step-valuefrom2-wf_v1_3.cwl | 41 + testdata/step-valuefrom3-wf_v1_3.cwl | 41 + testdata/step_valuefrom5_wf_v1_3.cwl | 74 + testdata/step_valuefrom5_wf_with_id_v1_3.cwl | 75 + testdata/wc3-tool_v1_3.cwl | 24 + testdata/{wf2.cwl => wf2.v1_2.cwl} | 0 testdata/wf2.v1_3.cwl | 30 + testdata/workflow_input_format_expr_v1_3.cwl | 36 + .../workflow_input_sf_expr_array_v1_3.cwl | 40 + testdata/workflow_input_sf_expr_v1_3.cwl | 39 + tests/test_etools_to_clt.py | 79 +- tests/test_format.py | 2 +- tests/test_packing.py | 4 +- tests/test_parser.py | 36 +- tests/test_parser_utils.py | 484 +- 53 files changed, 35183 insertions(+), 132 deletions(-) create mode 100755 cwl_utils/cwl_v1_3_expression_refactor.py create mode 100644 cwl_utils/parser/cwl_v1_3.py create mode 100644 cwl_utils/parser/cwl_v1_3_utils.py create mode 100755 testdata/all-output-loop_v1_3.cwl rename testdata/{cond-single-source-wf-003.1.cwl => cond-single-source-wf-003.1.v1_2.cwl} (92%) create mode 100755 testdata/cond-single-source-wf-003.1.v1_3.cwl rename testdata/{cond-single-source-wf-004.1.cwl => cond-single-source-wf-004.1.v1_2.cwl} (92%) create mode 100755 testdata/cond-single-source-wf-004.1.v1_3.cwl rename testdata/{cond-single-source-wf-005.1.cwl => cond-single-source-wf-005.1.v1_2.cwl} (92%) create mode 100755 testdata/cond-single-source-wf-005.1.v1_3.cwl rename testdata/{cond-wf-003.1.cwl => cond-wf-003.1.v1_2.cwl} (100%) create mode 100755 testdata/cond-wf-003.1.v1_3.cwl rename testdata/{cond-wf-004.1.cwl => cond-wf-004.1.v1_2.cwl} (100%) create mode 100755 testdata/cond-wf-004.1.v1_3.cwl rename testdata/{cond-wf-005.1.cwl => cond-wf-005.1.v1_2.cwl} (100%) create mode 100755 testdata/cond-wf-005.1.v1_3.cwl create mode 100755 testdata/count-lines6-single-source-wf_v1_3.cwl create mode 100755 testdata/count-lines6-wf_v1_3.cwl create mode 100755 testdata/count-lines7-single-source-wf_v1_3.cwl create mode 100755 testdata/count-lines7-wf_v1_3.cwl create mode 100755 testdata/echo_v1_3.cwl 
rename testdata/{foo-array.cwl => foo-array.v1_2.cwl} (100%) create mode 100755 testdata/foo-array.v1_3.cwl rename testdata/{formattest2.cwl => formattest2_v1_2.cwl} (100%) create mode 100755 testdata/formattest2_v1_3.cwl create mode 100755 testdata/map-ordering-v1_3.cwl create mode 100755 testdata/record-output-wf_v1_3.cwl create mode 100755 testdata/record-output_v1_3.cwl create mode 100755 testdata/scatter-wf1_v1_3.cwl create mode 100755 testdata/scatter-wf2_v1_3.cwl create mode 100755 testdata/scatter-wf3_v1_3.cwl create mode 100755 testdata/single-var-loop_v1_3.cwl create mode 100755 testdata/stdout-wf_v1_3.cwl create mode 100755 testdata/step-valuefrom2-wf_v1_3.cwl create mode 100755 testdata/step-valuefrom3-wf_v1_3.cwl create mode 100755 testdata/step_valuefrom5_wf_v1_3.cwl create mode 100755 testdata/step_valuefrom5_wf_with_id_v1_3.cwl create mode 100755 testdata/wc3-tool_v1_3.cwl rename testdata/{wf2.cwl => wf2.v1_2.cwl} (100%) create mode 100755 testdata/wf2.v1_3.cwl create mode 100755 testdata/workflow_input_format_expr_v1_3.cwl create mode 100755 testdata/workflow_input_sf_expr_array_v1_3.cwl create mode 100755 testdata/workflow_input_sf_expr_v1_3.cwl diff --git a/README.rst b/README.rst index b8fba953..03034f46 100644 --- a/README.rst +++ b/README.rst @@ -15,8 +15,10 @@ Python Utilities and Autogenerated Classes for loading and parsing `CWL v1.0 `__, `CWL v1.1 `__, +`CWL +v1.2 `__, and `CWL -v1.2 `__ +v1.3 `__, documents. 
Requires Python 3.8+ @@ -168,6 +170,9 @@ To regenerate install the ``schema_salad`` package and run: ``cwl_utils/parser/cwl_v1_2.py`` was created via ``schema-salad-tool --codegen python https://github.com/common-workflow-language/cwl-v1.2/raw/codegen/extensions.yml --codegen-parser-info "org.w3id.cwl.v1_2" > cwl_utils/parser/cwl_v1_2.py`` +``cwl_utils/parser/cwl_v1_3.py`` was created via +``schema-salad-tool --codegen python https://github.com/common-workflow-language/cwl-v1.3/raw/main/CommonWorkflowLanguage.yml --codegen-parser-info "org.w3id.cwl.v1_3" > cwl_utils/parser/cwl_v1_3.py`` + Release ~~~~~~~ diff --git a/cwl_utils/cwl_v1_3_expression_refactor.py b/cwl_utils/cwl_v1_3_expression_refactor.py new file mode 100755 index 00000000..cd5a5f18 --- /dev/null +++ b/cwl_utils/cwl_v1_3_expression_refactor.py @@ -0,0 +1,2254 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2018-2021 Michael R. Crusoe +"""CWL Expression refactoring tool for CWL v1.3 .""" +import copy +import hashlib +import uuid +from collections.abc import Mapping, MutableSequence, Sequence +from typing import Any, Optional, Union, cast + +from ruamel import yaml +from schema_salad.sourceline import SourceLine +from schema_salad.utils import json_dumps + +import cwl_utils.parser.cwl_v1_3 as cwl +import cwl_utils.parser.cwl_v1_3_utils as utils +from cwl_utils.errors import JavascriptException, WorkflowException +from cwl_utils.expression import do_eval, interpolate +from cwl_utils.types import CWLObjectType, CWLOutputType + + +def expand_stream_shortcuts(process: cwl.CommandLineTool) -> cwl.CommandLineTool: + """Rewrite the "type: stdout" shortcut to use an explicit random filename.""" + if not process.outputs: + return process + result = None + for index, output in enumerate(process.outputs): + if output.type_ == "stdout": # TODO: add 'stdin' for CWL v1.1 + if not result: + result = copy.deepcopy(process) + stdout_path = process.stdout + if not stdout_path: + stdout_path 
= str( + hashlib.sha1( # nosec + json_dumps(cwl.save(process)).encode("utf-8") + ).hexdigest() + ) + result.stdout = stdout_path + result.outputs[index].type_ = "File" + output.outputBinding = cwl.CommandOutputBinding(stdout_path, None, None) + if result: + return result + return process + + +def escape_expression_field(contents: str) -> str: + """Escape sequences similar to CWL expressions or param references.""" + return contents.replace("${", "$/{").replace("$(", "$/(") + + +def clean_type_ids( + cwltype: Union[cwl.ArraySchema, cwl.InputRecordSchema], +) -> Union[cwl.ArraySchema, cwl.InputRecordSchema]: + """Simplify type identifiers.""" + result = copy.deepcopy(cwltype) + if isinstance(result, cwl.ArraySchema): + if isinstance(result.items, MutableSequence): + for item in result.items: + if hasattr(item, "id"): + item.id = item.id.split("#")[-1] + elif isinstance(result.items, cwl.InputRecordSchema): + if result.items.name: + result.items.name = result.items.name.split("/")[-1] + if result.items.fields: + for field in result.items.fields: + field.name = field.name.split("/")[-1] + elif isinstance(result, cwl.InputRecordSchema): + if result.name: + result.name = result.name.split("/")[-1] + if result.fields: + for field in result.fields: + field.name = field.name.split("/")[-1] + return result + + +def get_expression( + string: str, inputs: CWLObjectType, self: Optional[CWLOutputType] +) -> Optional[str]: + """ + Find and return a normalized CWL expression, if any. + + CWL expressions in the $() form are converted to the ${} form. 
+ """ + if not isinstance(string, str): + return None + if string.strip().startswith("${"): + return string + if "$(" in string: + runtime: CWLObjectType = { + "cores": 0, + "ram": 0, + "outdir": "/root", + "tmpdir": "/tmp", # nosec + "outdirSize": 0, + "tmpdirSize": 0, + } + try: + do_eval( + string, + inputs, + context=self, + requirements=[], + outdir="", + tmpdir="", + resources={}, + ) + except (WorkflowException, JavascriptException): + if ( + string[0:2] != "$(" + or not string.endswith(")") + or len(string.split("$(")) > 2 + ): + # then it is a string interpolation + return cast( + str, + interpolate( + scan=string, + rootvars={ + "inputs": inputs, + "context": self, + "runtime": runtime, + }, + fullJS=True, + escaping_behavior=2, + convert_to_expression=True, + ), + ) + else: + # it is a CWL Expression in $() with no string interpolation + return "${return " + string.strip()[2:-1] + ";}" + return None + + +def etool_to_cltool( + etool: cwl.ExpressionTool, expressionLib: Optional[list[str]] = None +) -> cwl.CommandLineTool: + """Convert a ExpressionTool to a CommandLineTool.""" + inputs = yaml.comments.CommentedSeq() # preserve the order + for inp in etool.inputs: + inputs.append( + cwl.CommandInputParameter( + id=inp.id, + label=inp.label, + secondaryFiles=inp.secondaryFiles, + streamable=inp.streamable, + doc=inp.doc, + format=inp.format, + default=inp.default, + type_=inp.type_, + extension_fields=inp.extension_fields, + loadingOptions=inp.loadingOptions, + ) + ) + outputs = yaml.comments.CommentedSeq() + for outp in etool.outputs: + outputs.append( + cwl.CommandOutputParameter( + id=outp.id, + label=outp.label, + secondaryFiles=outp.secondaryFiles, + streamable=outp.streamable, + doc=outp.doc, + format=outp.format, + type_=outp.type_, + extension_fields=outp.extension_fields, + loadingOptions=outp.loadingOptions, + ) + ) + contents = """"use strict"; +var inputs=$(inputs); +var runtime=$(runtime);""" + if expressionLib: + contents += "\n" + 
"\n".join(expressionLib) + contents += ( + """ +var ret = function(){""" + + escape_expression_field(etool.expression.strip()[2:-1]) + + """}(); +process.stdout.write(JSON.stringify(ret));""" + ) + listing = [cwl.Dirent(entryname="expression.js", entry=contents, writable=None)] + iwdr = cwl.InitialWorkDirRequirement(listing) + containerReq = cwl.DockerRequirement(dockerPull="node:alpine") + softwareHint = cwl.SoftwareRequirement( + packages=[cwl.SoftwarePackage(package="nodejs")] + ) + return cwl.CommandLineTool( + inputs=inputs, + outputs=outputs, + id=etool.id, + requirements=[iwdr], + hints=[containerReq, softwareHint], + label=etool.label, + doc=etool.doc, + cwlVersion=etool.cwlVersion, + baseCommand=["nodejs", "expression.js"], + stdout="cwl.output.json", + extension_fields=etool.extension_fields, + loadingOptions=etool.loadingOptions, + ) + + +def traverse( + process: Union[cwl.CommandLineTool, cwl.ExpressionTool, cwl.Workflow], + replace_etool: bool, + inside: bool, + skip_command_line1: bool, + skip_command_line2: bool, +) -> tuple[Union[cwl.CommandLineTool, cwl.ExpressionTool, cwl.Workflow], bool]: + """Convert the given process and any subprocesses.""" + if not inside and isinstance(process, cwl.CommandLineTool): + process = expand_stream_shortcuts(process) + wf_inputs = [] + wf_outputs = [] + step_inputs = [] + step_outputs = [] + if process.inputs: + for inp in process.inputs: + inp_id = inp.id.split("#")[-1] + step_inputs.append( + cwl.WorkflowStepInput( + id=inp_id, + source=inp_id, + extension_fields=inp.extension_fields, + loadingOptions=inp.loadingOptions, + ) + ) + wf_inputs.append( + cwl.WorkflowInputParameter( + id=inp_id, + label=inp.label, + secondaryFiles=inp.secondaryFiles, + streamable=inp.streamable, + doc=inp.doc, + format=inp.format, + default=inp.default, + type_=inp.type_, + extension_fields=inp.extension_fields, + loadingOptions=inp.loadingOptions, + ) + ) + if process.outputs: + for outp in process.outputs: + outp_id = 
outp.id.split("#")[-1] + step_outputs.append(outp_id) + wf_outputs.append( + cwl.WorkflowOutputParameter( + id=outp_id, + label=outp.label, + secondaryFiles=outp.secondaryFiles, + streamable=outp.streamable, + doc=outp.doc, + format=outp.format, + outputSource=f"main/{outp_id}", + type_=outp.type_, + extension_fields=outp.extension_fields, + loadingOptions=outp.loadingOptions, + ) + ) + step = cwl.WorkflowStep( + id="#main", + in_=step_inputs, + out=step_outputs, + run=copy.deepcopy(process), + ) + workflow = cwl.Workflow( + inputs=wf_inputs, + outputs=wf_outputs, + steps=[step], + cwlVersion=process.cwlVersion, + ) + result, modified = traverse_workflow( + workflow, replace_etool, skip_command_line1, skip_command_line2 + ) + if modified: + return result, True + else: + return process, False + if isinstance(process, cwl.ExpressionTool) and replace_etool: + expression = get_expression(process.expression, empty_inputs(process), None) + # Why call get_expression on an ExpressionTool? + # It normalizes the form of $() CWL expressions into the ${} style + if expression: + process2 = copy.deepcopy(process) + process2.expression = expression + else: + process2 = process + return etool_to_cltool(process2), True + if isinstance(process, cwl.Workflow): + return traverse_workflow( + process, replace_etool, skip_command_line1, skip_command_line2 + ) + return process, False + + +def load_step( + step: cwl.WorkflowStep, + replace_etool: bool, + skip_command_line1: bool, + skip_command_line2: bool, +) -> bool: + """If the step's Process is not inline, load and process it.""" + modified = False + if isinstance(step.run, str): + step.run, modified = traverse( + cwl.load_document(step.run, baseuri=step.loadingOptions.fileuri), + replace_etool, + True, + skip_command_line1, + skip_command_line2, + ) + return modified + + +def generate_etool_from_expr( + expr: str, + target: Union[cwl.CommandInputParameter, cwl.WorkflowInputParameter], + no_inputs: bool = False, + self_type: Optional[ 
+ Union[ + cwl.WorkflowInputParameter, + cwl.CommandInputParameter, + list[Union[cwl.WorkflowInputParameter, cwl.CommandInputParameter]], + ] + ] = None, # if the "self" input should be a different type than the "result" output + extra_processes: Optional[ + Sequence[Union[cwl.Workflow, cwl.WorkflowStep, cwl.CommandLineTool]] + ] = None, +) -> cwl.ExpressionTool: + """Convert a CWL Expression into an ExpressionTool.""" + inputs = yaml.comments.CommentedSeq() + if not no_inputs: + if not self_type: + self_type = target + if isinstance(self_type, list): + new_type: Union[ + list[Union[cwl.ArraySchema, cwl.InputRecordSchema]], + Union[cwl.ArraySchema, cwl.InputRecordSchema], + ] = [clean_type_ids(t.type_) for t in self_type] + else: + new_type = clean_type_ids(self_type.type_) + inputs.append( + cwl.WorkflowInputParameter( + id="self", + label=self_type.label if not isinstance(self_type, list) else None, + secondaryFiles=( + self_type.secondaryFiles + if not isinstance(self_type, list) + else None + ), + streamable=( + self_type.streamable if not isinstance(self_type, list) else None + ), + doc=self_type.doc if not isinstance(self_type, list) else None, + format=self_type.format if not isinstance(self_type, list) else None, + type_=new_type, + extension_fields=( + self_type.extension_fields + if not isinstance(self_type, list) + else None + ), + loadingOptions=( + self_type.loadingOptions + if not isinstance(self_type, list) + else None + ), + ) + ) + outputs = yaml.comments.CommentedSeq() + outputs.append( + cwl.ExpressionToolOutputParameter( + id="result", + label=target.label, + secondaryFiles=target.secondaryFiles, + streamable=target.streamable, + doc=target.doc, + format=target.format, + type_=target.type_, + extension_fields=target.extension_fields, + loadingOptions=target.loadingOptions, + ) + ) + expression = "${" + if not no_inputs: + expression += "\n var self=inputs.self;" + expression += ( + """ + return {"result": function(){""" + + expr[2:-2] + + 
"""}()}; + }""" + ) + inlineJSReq = cwl.InlineJavascriptRequirement( + find_expressionLib(extra_processes) if extra_processes else None + ) + return cwl.ExpressionTool( + id="_:" + str(uuid.uuid4()), + inputs=inputs, + outputs=outputs, + expression=expression, + requirements=[inlineJSReq], + cwlVersion="v1.0", + ) + + +def get_input_for_id( + name: str, tool: Union[cwl.CommandLineTool, cwl.Workflow] +) -> Optional[cwl.CommandInputParameter]: + """Determine the CommandInputParameter for the given input name.""" + name = name.split("/")[-1] + + for inp in cast(list[cwl.CommandInputParameter], tool.inputs): + if inp.id and inp.id.split("#")[-1].split("/")[-1] == name: + return inp + if isinstance(tool, cwl.Workflow) and "/" in name: + stepname, stem = name.split("/", 1) + for step in tool.steps: + if step.id == stepname: + result = get_input_for_id(stem, step.run) + if result: + return result + return None + + +def find_expressionLib( + processes: Sequence[ + Union[cwl.CommandLineTool, cwl.Workflow, cwl.ExpressionTool, cwl.WorkflowStep] + ], +) -> Optional[list[str]]: + """ + Return the expressionLib from the highest priority InlineJavascriptRequirement. + + processes: should be in order of least important to most important + (Workflow, WorkflowStep, ... 
CommandLineTool/ExpressionTool) + """ + for process in reversed(copy.copy(processes)): + if process.requirements: + for req in process.requirements: + if isinstance(req, cwl.InlineJavascriptRequirement): + return cast(Optional[list[str]], copy.deepcopy(req.expressionLib)) + return None + + +def replace_expr_with_etool( + expr: str, + name: str, + workflow: cwl.Workflow, + target: Union[cwl.CommandInputParameter, cwl.WorkflowInputParameter], + source: Optional[Union[str, list[Any]]], + replace_etool: bool = False, + extra_process: Optional[ + Union[cwl.Workflow, cwl.WorkflowStep, cwl.CommandLineTool] + ] = None, + source_type: Optional[cwl.CommandInputParameter] = None, +) -> None: + """Modify the given workflow, replacing the expr with an standalone ExpressionTool.""" + extra_processes: list[ + Union[cwl.Workflow, cwl.WorkflowStep, cwl.CommandLineTool] + ] = [workflow] + if extra_process: + extra_processes.append(extra_process) + etool: cwl.ExpressionTool = generate_etool_from_expr( + expr, target, source is None, source_type, extra_processes + ) + if replace_etool: + processes: list[Union[cwl.WorkflowStep, cwl.Workflow, cwl.CommandLineTool]] = [ + workflow + ] + if extra_process: + processes.append(extra_process) + final_tool: Union[cwl.ExpressionTool, cwl.CommandLineTool] = etool_to_cltool( + etool, find_expressionLib(processes) + ) + else: + final_tool = etool + inps = [] + if source: + inps.append(cwl.WorkflowStepInput(id="self", source=source)) + workflow.steps.append( + cwl.WorkflowStep( + id=name, + in_=inps, + out=[cwl.WorkflowStepOutput("result")], + run=final_tool, + ) + ) + + +def replace_wf_input_ref_with_step_output( + workflow: cwl.Workflow, name: str, target: str +) -> None: + """Refactor all reference to a workflow input to the specified step output.""" + if workflow.steps: + for step in workflow.steps: + if step.in_: + for inp in step.in_: + if inp.source: + if inp.source == name: + inp.source = target + if isinstance(inp.source, MutableSequence): 
+ for index, source in enumerate(inp.source): + if source == name: + inp.source[index] = target + if workflow.outputs: + for outp in workflow.outputs: + if outp.outputSource: + if outp.outputSource == name: + outp.outputSource = target + if isinstance(outp.outputSource, MutableSequence): + for index, outputSource in enumerate(outp.outputSource): + if outputSource == name: + outp.outputSource[index] = target + + +def empty_inputs( + process_or_step: Union[ + cwl.CommandLineTool, cwl.WorkflowStep, cwl.ExpressionTool, cwl.Workflow + ], + parent: Optional[cwl.Workflow] = None, +) -> dict[str, Any]: + """Produce a mock input object for the given inputs.""" + result = {} + if isinstance(process_or_step, cwl.Process): + for param in process_or_step.inputs: + result[param.id.split("#")[-1]] = example_input(param.type_) + else: + for param in process_or_step.in_: + param_id = param.id.split("/")[-1] + if param.source is None and param.valueFrom: + result[param_id] = example_input("string") + elif param.source is None and param.default: + result[param_id] = param.default + else: + try: + result[param_id] = example_input( + utils.type_for_source(process_or_step.run, param.source, parent) + ) + except WorkflowException: + pass + return result + + +def example_input(some_type: Any) -> Any: + """Produce a fake input for the given type.""" + # TODO: accept some sort of context object with local custom type definitions + if some_type == "Directory": + return { + "class": "Directory", + "location": "https://www.example.com/example", + "basename": "example", + "listing": [ + { + "class": "File", + "basename": "example.txt", + "size": 23, + "contents": "hoopla", + "nameroot": "example", + "nameext": "txt", + } + ], + } + if some_type == "File": + return { + "class": "File", + "location": "https://www.example.com/example.txt", + "basename": "example.txt", + "size": 23, + "contents": "hoopla", + "nameroot": "example", + "nameext": "txt", + } + if some_type == "int": + return 23 + if 
some_type == "string": + return "hoopla!" + if some_type == "boolean": + return True + return None + + +EMPTY_FILE: CWLOutputType = { + "class": "File", + "basename": "em.pty", + "nameroot": "em", + "nameext": "pty", +} + +TOPLEVEL_SF_EXPR_ERROR = ( + "Input '{}'. Sorry, CWL Expressions as part of a secondaryFiles " + "specification in a Workflow level input or standalone CommandLine Tool " + "are not able to be refactored into separate ExpressionTool or " + "CommandLineTool steps." +) + +TOPLEVEL_FORMAT_EXPR_ERROR = ( + "Input '{}'. Sorry, CWL Expressions as part of a format " + "specification in a Workflow level input are not able to be refactored " + "into separate ExpressionTool/CommandLineTool steps." +) + +PICKVALUE_FIRST_NON_NULL_EXPR = """${ +for (let i = 0; i < self.length; i++) { + if (self[i] !== null){ + return self[i]; + } +} +throw 'pickValue=first_non_null, but no non-null value found: ' + self; +} +""" + +PICKVALUE_THE_ONLY_NON_NULL_EXPR = """${ +var found = null; +self.forEach(function(item) { + if (item !== null) { + if (found !== null) { + throw 'pickValue=the_only_non_null, but multiple non-null values found: ' + self; + } else { + found = item; + } + } +}) +if (found !== null) { + return found; +} +throw 'pickValue=the_only_non_null, but no non-null value found: ' + self; +} +""" + +PICKVALUE_ALL_NON_NULL_EXPR = """${ +var results = []; +self.forEach(function(item) { + if (item !== null){ + results.push(item); + } +}) +return results; +} +""" + +PICKVALUE_ERROR = ( + "pickValue '{}' is invalid. 
Should be one of 'first_non_null', " + "'the_only_non_null', 'all_non_null'" +) + + +def process_workflow_inputs_and_outputs( + workflow: cwl.Workflow, replace_etool: bool +) -> bool: + """Do any needed conversions on the given Workflow's inputs and outputs.""" + modified = False + inputs = empty_inputs(workflow) + for index, param in enumerate(workflow.inputs): + with SourceLine(workflow.inputs, index, WorkflowException): + if param.format and get_expression(param.format, inputs, None): + raise SourceLine( + param.loadingOptions.original_doc, + "format", + raise_type=WorkflowException, + ).makeError(TOPLEVEL_FORMAT_EXPR_ERROR.format(param.id.split("#")[-1])) + if param.secondaryFiles: + if hasattr(param.secondaryFiles, "pattern") and get_expression( + param.secondaryFiles.pattern, inputs, EMPTY_FILE + ): + raise SourceLine( + param.loadingOptions.original_doc, + "secondaryFiles", + raise_type=WorkflowException, + ).makeError(TOPLEVEL_SF_EXPR_ERROR.format(param.id.split("#")[-1])) + elif isinstance(param.secondaryFiles, MutableSequence): + for index2, entry in enumerate(param.secondaryFiles): + if get_expression(entry.pattern, inputs, EMPTY_FILE): + raise SourceLine( + param.loadingOptions.original_doc, + index2, + raise_type=WorkflowException, + ).makeError( + f"Entry {index}," + + TOPLEVEL_SF_EXPR_ERROR.format(param.id.split("#")[-1]) + ) + generated_pickValue_steps = [] + for index3, param2 in enumerate(workflow.outputs): + with SourceLine(workflow.outputs, index3, WorkflowException): + if param2.pickValue: + if param2.pickValue == "first_non_null": + expression = PICKVALUE_FIRST_NON_NULL_EXPR + elif param2.pickValue == "the_only_non_null": + expression = PICKVALUE_THE_ONLY_NON_NULL_EXPR + elif param2.pickValue == "all_non_null": + expression = PICKVALUE_ALL_NON_NULL_EXPR + else: + raise WorkflowException(PICKVALUE_ERROR.format(param2.pickValue)) + modified = True + + etool_id = "_pickValue_workflow_step_{}".format( + param2.id.split("#")[-1] + ) + target_type = 
copy.deepcopy(param2.type_) + if isinstance(target_type, cwl.OutputArraySchema): + target_type.name = "" + target = cwl.WorkflowInputParameter(id=None, type_=target_type) + if not isinstance(param2.outputSource, list): + sources = param2.outputSource.split("#")[-1] + else: + sources = [s.split("#")[-1] for s in param2.outputSource] + source_type_items = utils.type_for_source(workflow, sources) + if isinstance(source_type_items, cwl.ArraySchema): + if isinstance(source_type_items.items, list): + if "null" not in source_type_items.items: + source_type_items.items.append("null") + elif source_type_items.items != "null": + source_type_items.items = ["null", source_type_items.items] + elif isinstance(source_type_items, list): + if "null" not in source_type_items: + source_type_items.append("null") + elif source_type_items != "null": + source_type_items = ["null", source_type_items] + source_type = cwl.CommandInputParameter(type_=source_type_items) + replace_expr_with_etool( + expression, + etool_id, + workflow, + target, + sources, + replace_etool, + None, + source_type, + ) + param2.outputSource = f"{etool_id}/result" + param2.pickValue = None + generated_pickValue_steps.append(etool_id) + return modified + + +def process_workflow_reqs_and_hints( + workflow: cwl.Workflow, replace_etool: bool +) -> bool: + """ + Convert any expressions in a workflow's reqs and hints. + + Each expression will be converted to an additional step. + The converted requirement will be copied to all workflow steps that don't have that + requirement type. Those affected steps will gain an additional input from the relevant + synthesized expression step. + """ + # TODO: consolidate the generated etools/cltools into a single "_expression_workflow_reqs" step + # TODO: support resourceReq.* references to Workflow.inputs? 
+ # ^ By refactoring replace_expr_etool to allow multiple inputs, + # and connecting all workflow inputs to the generated step + modified = False + inputs = empty_inputs(workflow) + generated_res_reqs: list[tuple[str, Union[int, str]]] = [] + generated_iwdr_reqs: list[tuple[str, Union[int, str]]] = [] + generated_envVar_reqs: list[tuple[str, Union[int, str]]] = [] + prop_reqs: tuple[ + Union[ + type[cwl.EnvVarRequirement], + type[cwl.ResourceRequirement], + type[cwl.InitialWorkDirRequirement], + ], + ..., + ] = () + resourceReq: Optional[cwl.ResourceRequirement] = None + envVarReq: Optional[cwl.EnvVarRequirement] = None + iwdr: Optional[cwl.InitialWorkDirRequirement] = None + if workflow.requirements is not None: + for req in cast(list[cwl.ProcessRequirement], workflow.requirements): + if req and isinstance(req, cwl.EnvVarRequirement): + if req.envDef: + for index, envDef in enumerate(req.envDef): + if envDef.envValue: + expression = get_expression(envDef.envValue, inputs, None) + if expression: + modified = True + target = cwl.WorkflowInputParameter( + id=None, + type_="string", + ) + etool_id = ( + "_expression_workflow_EnvVarRequirement_{}".format( + index + ) + ) + replace_expr_with_etool( + expression, + etool_id, + workflow, + target, + None, + replace_etool, + ) + if envVarReq is None: + envVarReq = copy.deepcopy(req) + prop_reqs += (cwl.EnvVarRequirement,) + newEnvDef = copy.deepcopy(envDef) + newEnvDef.envValue = f"$(inputs._envDef{index})" + envVarReq.envDef[index] = newEnvDef + generated_envVar_reqs.append((etool_id, index)) + if req and isinstance(req, cwl.ResourceRequirement): + for attr in cwl.ResourceRequirement.attrs: + this_attr = getattr(req, attr, None) + if this_attr: + expression = get_expression(this_attr, inputs, None) + if expression: + modified = True + target = cwl.WorkflowInputParameter(id=None, type_="long") + etool_id = ( + "_expression_workflow_ResourceRequirement_{}".format( + attr + ) + ) + replace_expr_with_etool( + expression, + 
etool_id, + workflow, + target, + None, + replace_etool, + ) + if not resourceReq: + resourceReq = cwl.ResourceRequirement( + loadingOptions=workflow.loadingOptions, + ) + prop_reqs += (cwl.ResourceRequirement,) + setattr(resourceReq, attr, f"$(inputs._{attr})") + generated_res_reqs.append((etool_id, attr)) + if req and isinstance(req, cwl.InitialWorkDirRequirement): + if req.listing: + if isinstance(req.listing, str): + expression = get_expression(req.listing, inputs, None) + if expression: + modified = True + target = cwl.WorkflowInputParameter( + id=None, + type_=cwl.InputArraySchema( + ["File", "Directory"], "array", None, None + ), + ) + etool_id = "_expression_workflow_InitialWorkDirRequirement" + replace_expr_with_etool( + expression, + etool_id, + workflow, + target, + None, + replace_etool, + ) + iwdr = cwl.InitialWorkDirRequirement( + listing="$(inputs._iwdr_listing)", + loadingOptions=workflow.loadingOptions, + ) + prop_reqs += (cwl.InitialWorkDirRequirement,) + else: + iwdr = copy.deepcopy(req) + for index, entry in enumerate(req.listing): + expression = get_expression(entry, inputs, None) + if expression: + modified = True + target = cwl.WorkflowInputParameter( + id=None, + type_=cwl.InputArraySchema( + ["File", "Directory"], "array", None, None + ), + ) + etool_id = "_expression_workflow_InitialWorkDirRequirement_{}".format( + index + ) + replace_expr_with_etool( + expression, + etool_id, + workflow, + target, + None, + replace_etool, + ) + iwdr.listing[index] = f"$(inputs._iwdr_listing_{index}" + generated_iwdr_reqs.append((etool_id, index)) + elif isinstance(entry, cwl.Dirent): + if entry.entry: + expression = get_expression( + entry.entry, inputs, None + ) + if expression: + expr: str = expression + expr_result = do_eval( + ex=entry.entry, + jobinput=inputs, + requirements=[], + outdir="", + tmpdir="", + resources={}, + ) + modified = True + if ( + isinstance(expr_result, Mapping) + and "class" in expr_result + and ( + expr_result["class"] == 
"File" + or expr_result["class"] == "Directory" + ) + ): + target = cwl.WorkflowInputParameter( + id=None, + type_=expr_result["class"], + ) + replace_expr_with_etool( + expr, + etool_id, + workflow, + target, + None, + replace_etool, + ) + iwdr.listing[index] = ( + "$(inputs._iwdr_listing_{}".format( + index + ) + ) + generated_iwdr_reqs.append( + (etool_id, index) + ) + elif isinstance(expr_result, str): + target = cwl.WorkflowInputParameter( + id=None, + type_=["File"], + ) + if entry.entryname is None: + raise SourceLine( + entry.loadingOptions.original_doc, + index, + raise_type=WorkflowException, + ).makeError( + f"Entry {index}," + + "Invalid CWL, if 'entry' " + "is a string, then entryName must be specified." + ) + expr = ( + '${return {"class": "File", "basename": "' + + entry.entryname + + '", "contents": (function(){' + + expr[2:-1] + + "})() }; }" + ) + etool_id = "_expression_workflow_InitialWorkDirRequirement_{}".format( + index + ) + replace_expr_with_etool( + expr, + etool_id, + workflow, + target, + None, + replace_etool, + ) + iwdr.listing[index] = ( + f"$(inputs._iwdr_listing_{index}" + ) + generated_iwdr_reqs.append((etool_id, index)) + + elif entry.entryname: + expression = get_expression( + entry.entryname, inputs, None + ) + if expression: + modified = True + target = cwl.WorkflowInputParameter( + id=None, + type_="string", + ) + etool_id = "_expression_workflow_InitialWorkDirRequirement_{}".format( + index + ) + replace_expr_with_etool( + expression, + etool_id, + workflow, + target, + None, + replace_etool, + ) + iwdr.listing[index] = ( + f"$(inputs._iwdr_listing_{index}" + ) + generated_iwdr_reqs.append((etool_id, index)) + if generated_iwdr_reqs: + prop_reqs += (cwl.InitialWorkDirRequirement,) + else: + iwdr = None + if envVarReq and workflow.steps: + for step in workflow.steps: + if step.id.split("#")[-1].startswith("_expression_"): + continue + if step.requirements: + for req in step.requirements: + if isinstance(req, 
cwl.EnvVarRequirement): + continue + else: + step.requirements = yaml.comments.CommentedSeq() + step.requirements.append(envVarReq) + for entry in generated_envVar_reqs: + step.in_.append( + cwl.WorkflowStepInput( + id=f"_envDef{entry[1]}", + source=f"{entry[0]}/result", + ) + ) + + if resourceReq and workflow.steps: + for step in workflow.steps: + if step.id.split("#")[-1].startswith("_expression_"): + continue + if step.requirements: + for req in step.requirements: + if isinstance(req, cwl.ResourceRequirement): + continue + else: + step.requirements = yaml.comments.CommentedSeq() + step.requirements.append(resourceReq) + for entry in generated_res_reqs: + step.in_.append( + cwl.WorkflowStepInput( + id=f"_{entry[1]}", + source=f"{entry[0]}/result", + ) + ) + + if iwdr and workflow.steps: + for step in workflow.steps: + if step.id.split("#")[-1].startswith("_expression_"): + continue + if step.requirements: + for req in step.requirements: + if isinstance(req, cwl.InitialWorkDirRequirement): + continue + else: + step.requirements = yaml.comments.CommentedSeq() + step.requirements.append(iwdr) + if generated_iwdr_reqs: + for entry in generated_iwdr_reqs: + step.in_.append( + cwl.WorkflowStepInput( + id=f"_iwdr_listing_{index}", + source=f"{entry[0]}/result", + ) + ) + else: + step.in_.append( + cwl.WorkflowStepInput( + id="_iwdr_listing", + source="_expression_workflow_InitialWorkDirRequirement/result", + ) + ) + + if workflow.requirements: + workflow.requirements[:] = [ + x for x in workflow.requirements if not isinstance(x, prop_reqs) + ] + return modified + + +def process_level_reqs( + process: cwl.CommandLineTool, + step: cwl.WorkflowStep, + parent: cwl.Workflow, + replace_etool: bool, + skip_command_line1: bool, + skip_command_line2: bool, +) -> bool: + """Convert expressions inside a process into new adjacent steps.""" + # This is for reqs inside a Process (CommandLineTool, ExpressionTool) + # differences from process_workflow_reqs_and_hints() are: + # - the 
name of the generated ETools/CTools contains the name of the step, not "workflow" + # - Generated ETools/CTools are adjacent steps + # - Replace the CWL Expression inplace with a CWL parameter reference + # - Don't create a new Requirement, nor delete the existing Requirement + # - the Process is passed to replace_expr_with_etool for later searching for JS expressionLibs + # - in addition to adding the input to the step for the ETool/CTool result, + # add it to the Process.inputs as well + if not process.requirements: + return False + modified = False + target_process = step.run + inputs = empty_inputs(process) + generated_res_reqs: list[tuple[str, str]] = [] + generated_iwdr_reqs: list[tuple[str, Union[int, str], Any]] = [] + generated_envVar_reqs: list[tuple[str, Union[int, str]]] = [] + if not step.id: + return False + step_name = step.id.split("#", 1)[-1] + for req_index, req in enumerate(process.requirements): + if req and isinstance(req, cwl.EnvVarRequirement): + if req.envDef: + for env_index, envDef in enumerate(req.envDef): + if envDef.envValue: + expression = get_expression(envDef.envValue, inputs, None) + if expression: + modified = True + target = cwl.WorkflowInputParameter(id=None, type_="string") + etool_id = "_expression_{}_EnvVarRequirement_{}".format( + step_name, env_index + ) + replace_expr_with_etool( + expression, + etool_id, + parent, + target, + None, + replace_etool, + process, + ) + target_process.requirements[req_index][ + env_index + ].envValue = f"$(inputs._envDef{env_index})" + generated_envVar_reqs.append((etool_id, env_index)) + if req and isinstance(req, cwl.ResourceRequirement): + for attr in cwl.ResourceRequirement.attrs: + this_attr = getattr(req, attr, None) + if this_attr: + expression = get_expression(this_attr, inputs, None) + if expression: + modified = True + target = cwl.WorkflowInputParameter(id=None, type_="long") + etool_id = "_expression_{}_ResourceRequirement_{}".format( + step_name, attr + ) + 
replace_clt_hintreq_expr_with_etool( + expression, etool_id, parent, target, step, replace_etool + ) + setattr( + target_process.requirements[req_index], + attr, + f"$(inputs._{attr})", + ) + generated_res_reqs.append((etool_id, attr)) + + if ( + not skip_command_line2 + and req + and isinstance(req, cwl.InitialWorkDirRequirement) + ): + if req.listing: + if isinstance(req.listing, str): + expression = get_expression(req.listing, inputs, None) + if expression: + modified = True + target_type = cwl.InputArraySchema( + ["File", "Directory"], "array", None, None + ) + target = cwl.WorkflowInputParameter(id=None, type_=target_type) + etool_id = "_expression_{}_InitialWorkDirRequirement".format( + step_name + ) + replace_expr_with_etool( + expression, + etool_id, + parent, + target, + None, + replace_etool, + process, + ) + target_process.requirements[req_index].listing = ( + "$(inputs._iwdr_listing)", + ) + step.in_.append( + cwl.WorkflowStepInput( + id="_iwdr_listing", + source=f"{etool_id}/result", + ) + ) + add_input_to_process( + target_process, + "_iwdr_listing", + target_type, + process.loadingOptions, + ) + else: + for listing_index, entry in enumerate(req.listing): + expression = get_expression(entry, inputs, None) + if expression: + modified = True + target_type = cwl.InputArraySchema( + ["File", "Directory"], "array", None, None + ) + target = cwl.WorkflowInputParameter( + id=None, + type_=target_type, + ) + etool_id = ( + "_expression_{}_InitialWorkDirRequirement_{}".format( + step_name, listing_index + ) + ) + replace_expr_with_etool( + expression, + etool_id, + parent, + target, + None, + replace_etool, + process, + ) + target_process.requirements[req_index].listing[ + listing_index + ] = f"$(inputs._iwdr_listing_{listing_index}" + generated_iwdr_reqs.append( + (etool_id, listing_index, target_type) + ) + elif isinstance(entry, cwl.Dirent): + if entry.entry: + expression = get_expression(entry.entry, inputs, None) + if expression: + modified = True + if 
entry.entryname is not None: + entryname_expr = get_expression( + entry.entryname, inputs, None + ) + entryname = ( + entry.entryname + if entryname_expr + else f'"{entry.entryname}"' # noqa: B907 + ) + new_expression = ( + "${var result; var entryname = " + + entryname + + "; var entry = " + + entry.entry[2:-1] + + """; +if (typeof entry === 'string' || entry instanceof String) { +result = {"class": "File", "basename": entryname, "contents": entry} ; +if (typeof entryname === 'string' || entryname instanceof String) { +result.basename = entryname ; +} +} else { +result = entry ; +} +return result; }""" + ) + else: + new_expression = expression + d_target_type = ["File", "Directory"] + target = cwl.WorkflowInputParameter( + id=None, + type_=d_target_type, + ) + etool_id = "_expression_{}_InitialWorkDirRequirement_{}".format( + step_name, listing_index + ) + + replace_clt_hintreq_expr_with_etool( + new_expression, + etool_id, + parent, + target, + step, + replace_etool, + ) + target_process.requirements[req_index].listing[ + listing_index + ].entry = "$(inputs._iwdr_listing_{})".format( + listing_index + ) + generated_iwdr_reqs.append( + (etool_id, listing_index, d_target_type) + ) + elif entry.entryname: + expression = get_expression( + entry.entryname, inputs, None + ) + if expression: + modified = True + target = cwl.WorkflowInputParameter( + id=None, + type_="string", + ) + etool_id = "_expression_{}_InitialWorkDirRequirement_{}".format( + step_name, listing_index + ) + replace_expr_with_etool( + expression, + etool_id, + parent, + target, + None, + replace_etool, + process, + ) + target_process.requirements[req_index].listing[ + listing_index + ].entryname = "$(inputs._iwdr_listing_{})".format( + listing_index + ) + generated_iwdr_reqs.append( + (etool_id, listing_index, "string") + ) + for entry in generated_envVar_reqs: + name = f"_envDef{entry[1]}" + step.in_.append(cwl.WorkflowStepInput(id=name, source=f"{entry[0]}/result")) + 
add_input_to_process(target_process, name, "string", process.loadingOptions) + for entry in generated_res_reqs: + name = f"_{entry[1]}" + step.in_.append(cwl.WorkflowStepInput(id=name, source=f"{entry[0]}/result")) + add_input_to_process(target_process, name, "long", process.loadingOptions) + for entry in generated_iwdr_reqs: + name = f"_iwdr_listing_{entry[1]}" + step.in_.append(cwl.WorkflowStepInput(id=name, source=f"{entry[0]}/result")) + add_input_to_process(target_process, name, entry[2], process.loadingOptions) + return modified + + +def add_input_to_process( + process: cwl.Process, name: str, inptype: Any, loadingOptions: cwl.LoadingOptions +) -> None: + """Add a new InputParameter to the given CommandLineTool.""" + if isinstance(process, cwl.CommandLineTool): + process.inputs.append( + cwl.CommandInputParameter( + id=name, + type_=inptype, + loadingOptions=loadingOptions, + ) + ) + + +def traverse_CommandLineTool( + clt: cwl.CommandLineTool, + parent: cwl.Workflow, + step: cwl.WorkflowStep, + replace_etool: bool, + skip_command_line1: bool, + skip_command_line2: bool, +) -> bool: + """Extract any CWL Expressions within the given CommandLineTool into sibling steps.""" + modified = False + # don't modify clt, modify step.run + target_clt = step.run + inputs = empty_inputs(clt) + if not step.id: + return False + step_id = step.id.split("#")[-1] + if clt.arguments and not skip_command_line1: + for index, arg in enumerate(clt.arguments): + if isinstance(arg, str): + expression = get_expression(arg, inputs, None) + if expression: + modified = True + inp_id = f"_arguments_{index}" + etool_id = f"_expression_{step_id}{inp_id}" + target_type = "Any" + target = cwl.WorkflowInputParameter(id=None, type_=target_type) + replace_step_clt_expr_with_etool( + expression, etool_id, parent, target, step, replace_etool + ) + target_clt.arguments[index] = cwl.CommandLineBinding( + valueFrom=f"$(inputs.{inp_id})" + ) + target_clt.inputs.append( + cwl.CommandInputParameter( + 
id=inp_id, + type_=target_type, + ) + ) + step.in_.append( + cwl.WorkflowStepInput( + f"{etool_id}/result", None, inp_id, None, None + ) + ) + remove_JSReq(target_clt, skip_command_line1) + elif isinstance(arg, cwl.CommandLineBinding) and arg.valueFrom: + expression = get_expression(arg.valueFrom, inputs, None) + if expression: + modified = True + inp_id = f"_arguments_{index}" + etool_id = f"_expression_{step_id}{inp_id}" + target_type = "Any" + target = cwl.WorkflowInputParameter(id=None, type_=target_type) + replace_step_clt_expr_with_etool( + expression, etool_id, parent, target, step, replace_etool + ) + target_clt.arguments[index].valueFrom = "$(inputs.{})".format( + inp_id + ) + target_clt.inputs.append( + cwl.CommandInputParameter( + id=inp_id, + type_=target_type, + ) + ) + step.in_.append( + cwl.WorkflowStepInput(id=inp_id, source=f"{etool_id}/result") + ) + remove_JSReq(target_clt, skip_command_line1) + for streamtype in "stdout", "stderr": # add 'stdin' for v1.1 version + stream_value = getattr(clt, streamtype) + if stream_value: + expression = get_expression(stream_value, inputs, None) + if expression: + modified = True + inp_id = f"_{streamtype}" + etool_id = f"_expression_{step_id}{inp_id}" + target_type = "string" + target = cwl.WorkflowInputParameter(id=None, type_=target_type) + replace_step_clt_expr_with_etool( + expression, etool_id, parent, target, step, replace_etool + ) + setattr(target_clt, streamtype, f"$(inputs.{inp_id})") + target_clt.inputs.append( + cwl.CommandInputParameter(id=inp_id, type_=target_type) + ) + step.in_.append( + cwl.WorkflowStepInput(id=inp_id, source=f"{etool_id}/result") + ) + for inp in clt.inputs: + if not skip_command_line1 and inp.inputBinding and inp.inputBinding.valueFrom: + expression = get_expression( + inp.inputBinding.valueFrom, inputs, example_input(inp.type_) + ) + if expression: + modified = True + self_id = inp.id.split("#")[-1] + inp_id = f"_{self_id}_valueFrom" + etool_id = 
f"_expression_{step_id}{inp_id}" + replace_step_clt_expr_with_etool( + expression, etool_id, parent, inp, step, replace_etool, self_id + ) + inp.inputBinding.valueFrom = f"$(inputs.{inp_id})" + target_clt.inputs.append( + cwl.CommandInputParameter(id=inp_id, type_=inp.type_) + ) + step.in_.append( + cwl.WorkflowStepInput(id=inp_id, source=f"{etool_id}/result") + ) + for outp in clt.outputs: + if outp.outputBinding: + if outp.outputBinding.glob: + expression = get_expression(outp.outputBinding.glob, inputs, None) + if expression: + modified = True + inp_id = "_{}_glob".format(outp.id.split("#")[-1]) + etool_id = f"_expression_{step_id}{inp_id}" + glob_target_type = ["string", cwl.ArraySchema("string", "array")] + target = cwl.WorkflowInputParameter(id=None, type_=glob_target_type) + replace_step_clt_expr_with_etool( + expression, etool_id, parent, target, step, replace_etool + ) + outp.outputBinding.glob = f"$(inputs.{inp_id})" + target_clt.inputs.append( + cwl.CommandInputParameter( + id=inp_id, + type_=glob_target_type, + ) + ) + step.in_.append( + cwl.WorkflowStepInput(id=inp_id, source=f"{etool_id}/result") + ) + if outp.outputBinding.outputEval and not skip_command_line2: + self: CWLOutputType = [ + { + "class": "File", + "basename": "base.name", + "nameroot": "base", + "nameext": "name", + "path": "/tmp/base.name", # nosec + "dirname": "/tmp", # nosec + } + ] + if outp.outputBinding.loadContents: + cast(dict[Any, Any], self)[0]["contents"] = "stuff" + expression = get_expression(outp.outputBinding.outputEval, inputs, self) + if expression: + modified = True + outp_id = outp.id.split("#")[-1] + inp_id = f"_{outp_id}_outputEval" + etool_id = f"expression{inp_id}" + sub_wf_outputs = cltool_step_outputs_to_workflow_outputs( + step, etool_id, outp_id + ) + self_type = cwl.WorkflowInputParameter( + id=None, + type_=cwl.InputArraySchema("File", "array", None, None), + ) + etool = generate_etool_from_expr( + expression, outp, False, self_type, [clt, step, parent] + ) 
+ if outp.outputBinding.loadContents: + etool.inputs[0].type_.inputBinding = cwl.CommandLineBinding( + loadContents=True + ) + etool.inputs.extend(cltool_inputs_to_etool_inputs(clt)) + sub_wf_inputs = cltool_inputs_to_etool_inputs(clt) + orig_step_inputs = copy.deepcopy(step.in_) + for orig_step_input in orig_step_inputs: + orig_step_input.id = orig_step_input.id.split("/")[-1] + if isinstance(orig_step_input.source, MutableSequence): + for index, source in enumerate(orig_step_input.source): + orig_step_input.source[index] = source.split("#")[-1] + else: + orig_step_input.source = orig_step_input.source.split("#")[ + -1 + ] + orig_step_inputs[:] = [ + x for x in orig_step_inputs if not x.id.startswith("_") + ] + for inp in orig_step_inputs: + inp.source = inp.id + inp.linkMerge = None + if replace_etool: + processes = [parent] + final_etool: Union[cwl.CommandLineTool, cwl.ExpressionTool] = ( + etool_to_cltool(etool, find_expressionLib(processes)) + ) + else: + final_etool = etool + if isinstance(step, cwl.ScatterWorkflowStep): + etool_step = cwl.ScatterWorkflowStep( + id=etool_id, + in_=orig_step_inputs, + out=[cwl.WorkflowStepOutput("result")], + run=final_etool, + scatterMethod=step.scatterMethod, + ) + else: + etool_step = cwl.WorkflowStep( + id=etool_id, + in_=orig_step_inputs, + out=[cwl.WorkflowStepOutput("result")], + run=final_etool, + ) + new_clt_step = copy.copy( + step + ) # a deepcopy would be convenient, but params2.cwl gives it problems + new_clt_step.id = new_clt_step.id.split("#")[-1] + new_clt_step.run = copy.copy(step.run) + new_clt_step.run.id = None + remove_JSReq(new_clt_step.run, skip_command_line1) + for new_outp in new_clt_step.run.outputs: + if new_outp.id.split("#")[-1] == outp_id: + if isinstance( + new_outp, + ( + cwl.WorkflowOutputParameter, + cwl.ExpressionToolOutputParameter, + ), + ): + new_outp.type_ = cwl.OutputArraySchema( + items="File", type_="array" + ) + elif isinstance(new_outp, cwl.CommandOutputParameter): + if 
new_outp.outputBinding: + new_outp.outputBinding.outputEval = None + new_outp.outputBinding.loadContents = None + new_outp.type_ = cwl.CommandOutputArraySchema( + items="File", + type_="array", + ) + else: + raise Exception( + "Unimplemented OutputParameter type: %s", + type(new_outp), + ) + new_clt_step.in_ = copy.deepcopy(step.in_) + for inp in new_clt_step.in_: + inp.id = inp.id.split("/")[-1] + inp.source = inp.id + inp.linkMerge = None + for index, out in enumerate(new_clt_step.out): + new_clt_step.out[index] = out.split("/")[-1] + for tool_inp in new_clt_step.run.inputs: + tool_inp.id = tool_inp.id.split("#")[-1] + for tool_out in new_clt_step.run.outputs: + tool_out.id = tool_out.id.split("#")[-1] + sub_wf_steps = [new_clt_step, etool_step] + sub_workflow = cwl.Workflow( + inputs=sub_wf_inputs, + outputs=sub_wf_outputs, + steps=sub_wf_steps, + cwlVersion=parent.cwlVersion, + ) + if isinstance(step, cwl.ScatterWorkflowStep): + new_clt_step.scatter = None + step.run = sub_workflow + rename_step_source( + sub_workflow, + f"{step_id}/{outp_id}", + f"{etool_id}/result", + ) + orig_step_inputs.append( + cwl.WorkflowStepInput(id="self", source=f"{step_id}/{outp_id}") + ) + if not parent.requirements: + parent.requirements = [cwl.SubworkflowFeatureRequirement()] + else: + has_sub_wf_req = False + for req in parent.requirements: + if isinstance(req, cwl.SubworkflowFeatureRequirement): + has_sub_wf_req = True + if not has_sub_wf_req: + parent.requirements.append( + cwl.SubworkflowFeatureRequirement() + ) + return modified + + +def rename_step_source(workflow: cwl.Workflow, old: str, new: str) -> None: + """Update step source names to the new name.""" + + def simplify_wf_id(uri: str) -> str: + return uri.split("#")[-1].split("/", 1)[1] + + def simplify_step_id(uri: str) -> str: + return uri.split("#")[-1] + + for wf_outp in workflow.outputs: + if wf_outp.outputSource and simplify_wf_id(wf_outp.outputSource) == old: + wf_outp.outputSource = new + for step in 
workflow.steps: + if step.in_: + for inp in step.in_: + if inp.source: + if isinstance(inp.source, str): + source_id = ( + simplify_step_id(inp.source) + if "#" in inp.source + else inp.source + ) + if source_id == old: + inp.source = new + else: + for index, source in enumerate(inp.source): + if simplify_step_id(source) == old: + inp.source[index] = new + + +def remove_JSReq( + process: Union[cwl.CommandLineTool, cwl.WorkflowStep, cwl.Workflow], + skip_command_line1: bool, +) -> None: + """Since the InlineJavascriptRequirement is longer needed, remove it.""" + if skip_command_line1 and isinstance(process, cwl.CommandLineTool): + return + if process.hints: + process.hints[:] = [ + hint + for hint in process.hints + if not isinstance(hint, cwl.InlineJavascriptRequirement) + ] + if not process.hints: + process.hints = None + if process.requirements: + process.requirements[:] = [ + req + for req in process.requirements + if not isinstance(req, cwl.InlineJavascriptRequirement) + ] + if not process.requirements: + process.requirements = None + + +def replace_step_clt_expr_with_etool( + expr: str, + name: str, + workflow: cwl.Workflow, + target: cwl.WorkflowInputParameter, + step: cwl.WorkflowStep, + replace_etool: bool, + self_name: Optional[str] = None, +) -> None: + """Convert a step level CWL Expression to a sibling expression step.""" + etool_inputs = cltool_inputs_to_etool_inputs(step.run) + temp_etool = generate_etool_from_expr2( + expr, target, etool_inputs, self_name, step.run, [workflow] + ) + if replace_etool: + processes = [workflow] + etool: Union[cwl.ExpressionTool, cwl.CommandLineTool] = etool_to_cltool( + temp_etool, find_expressionLib(processes) + ) + else: + etool = temp_etool + wf_step_inputs = copy.deepcopy(step.in_) + for wf_step_input in wf_step_inputs: + wf_step_input.id = wf_step_input.id.split("/")[-1] + wf_step_inputs[:] = [x for x in wf_step_inputs if not x.id.startswith("_")] + workflow.steps.append( + cwl.WorkflowStep( + id=name, + 
in_=wf_step_inputs, + out=[cwl.WorkflowStepOutput("result")], + run=etool, + ) + ) + + +def replace_clt_hintreq_expr_with_etool( + expr: str, + name: str, + workflow: cwl.Workflow, + target: cwl.WorkflowInputParameter, + step: cwl.WorkflowStep, + replace_etool: bool, + self_name: Optional[str] = None, +) -> Union[cwl.CommandLineTool, cwl.ExpressionTool]: + """Factor out an expression inside a CommandLineTool req or hint into a sibling step.""" + # Same as replace_step_clt_expr_with_etool or different? + etool_inputs = cltool_inputs_to_etool_inputs(step.run) + temp_etool = generate_etool_from_expr2( + expr, target, etool_inputs, self_name, step.run, [workflow] + ) + if replace_etool: + processes = [workflow] + etool: Union[cwl.CommandLineTool, cwl.ExpressionTool] = etool_to_cltool( + temp_etool, find_expressionLib(processes) + ) + else: + etool = temp_etool + wf_step_inputs = copy.deepcopy(step.in_) + for wf_step_input in wf_step_inputs: + wf_step_input.id = wf_step_input.id.split("/")[-1] + wf_step_inputs[:] = [x for x in wf_step_inputs if not x.id.startswith("_")] + workflow.steps.append( + cwl.WorkflowStep( + id=name, + in_=wf_step_inputs, + out=[cwl.WorkflowStepOutput("result")], + run=etool, + ) + ) + return etool + + +def cltool_inputs_to_etool_inputs( + tool: cwl.CommandLineTool, +) -> list[cwl.WorkflowInputParameter]: + """Copy CommandLineTool input objects into the equivalent ExpressionTool input objects.""" + inputs = yaml.comments.CommentedSeq() + if tool.inputs: + for clt_inp in tool.inputs: + clt_inp_id = clt_inp.id.split("#")[-1].split("/")[-1] + if not clt_inp_id.startswith("_"): + inputs.append( + cwl.WorkflowInputParameter( + id=clt_inp_id, + label=clt_inp.label, + secondaryFiles=clt_inp.secondaryFiles, + streamable=clt_inp.streamable, + doc=clt_inp.doc, + format=clt_inp.format, + default=clt_inp.default, + type_=clt_inp.type_, + extension_fields=clt_inp.extension_fields, + loadingOptions=clt_inp.loadingOptions, + ) + ) + return inputs + + +def 
cltool_step_outputs_to_workflow_outputs( + cltool_step: cwl.WorkflowStep, etool_step_id: str, etool_out_id: str +) -> list[cwl.OutputParameter]: + """ + Copy CommandLineTool outputs into the equivalent Workflow output parameters. + + Connects the outputSources for each of the new output parameters to the step + they came from. + """ + outputs = yaml.comments.CommentedSeq() + if not cltool_step.id: + raise WorkflowException(f"Missing step id from {cltool_step}.") + default_step_id = cltool_step.id.split("#")[-1] + if cltool_step.run.outputs: + for clt_out in cltool_step.run.outputs: + clt_out_id = clt_out.id.split("#")[-1].split("/")[-1] + if clt_out_id == etool_out_id: + outputSource = f"{etool_step_id}/result" + else: + outputSource = f"{default_step_id}/{clt_out_id}" + if not clt_out_id.startswith("_"): + outputs.append( + cwl.WorkflowOutputParameter( + id=clt_out_id, + label=clt_out.label, + secondaryFiles=clt_out.secondaryFiles, + streamable=clt_out.streamable, + doc=clt_out.doc, + format=clt_out.format, + outputSource=outputSource, + type_=clt_out.type_, + extension_fields=clt_out.extension_fields, + loadingOptions=clt_out.loadingOptions, + ) + ) + return outputs + + +def generate_etool_from_expr2( + expr: str, + target: Union[cwl.CommandInputParameter, cwl.WorkflowInputParameter], + inputs: Sequence[Union[cwl.WorkflowInputParameter, cwl.CommandInputParameter]], + self_name: Optional[str] = None, + process: Optional[Union[cwl.CommandLineTool, cwl.ExpressionTool]] = None, + extra_processes: Optional[ + Sequence[Union[cwl.Workflow, cwl.WorkflowStep, cwl.CommandLineTool]] + ] = None, +) -> cwl.ExpressionTool: + """Generate an ExpressionTool to achieve the same result as the given expression.""" + outputs = yaml.comments.CommentedSeq() + outputs.append( + cwl.ExpressionToolOutputParameter( + id="result", + label=target.label, + secondaryFiles=target.secondaryFiles, + streamable=target.streamable, + doc=target.doc, + format=target.format, + type_=target.type_, + ) 
+ ) + expression = "${" + if self_name: + expression += f"\n var self=inputs.{self_name};" + expression += ( + """ + return {"result": function(){""" + + expr[2:-2] + + """}()}; + }""" + ) + hints = None + procs: list[ + Union[cwl.CommandLineTool, cwl.ExpressionTool, cwl.Workflow, cwl.WorkflowStep] + ] = [] + if process: + procs.append(process) + if extra_processes: + procs.extend(extra_processes) + inlineJSReq = cwl.InlineJavascriptRequirement(find_expressionLib(procs)) + reqs = [inlineJSReq] + if process: + if process.hints: + hints = copy.deepcopy(process.hints) + hints[:] = [ + x for x in hints if not isinstance(x, cwl.InitialWorkDirRequirement) + ] + if process.requirements: + reqs.extend(copy.deepcopy(process.requirements)) + reqs[:] = [ + x for x in reqs if not isinstance(x, cwl.InitialWorkDirRequirement) + ] + return cwl.ExpressionTool( + id="_:" + str(uuid.uuid4()), + inputs=inputs, + outputs=outputs, + expression=expression, + requirements=reqs, + cwlVersion="v1.0", + ) + + +def traverse_step( + step: cwl.WorkflowStep, + parent: cwl.Workflow, + replace_etool: bool, + skip_command_line1: bool, + skip_command_line2: bool, +) -> bool: + """Process the given WorkflowStep.""" + modified = False + inputs = empty_inputs(step, parent) + if not step.id: + return False + step_id = step.id.split("#")[-1] + original_process = copy.deepcopy(step.run) + original_step_ins = copy.deepcopy(step.in_) + for inp in step.in_: + if inp.valueFrom: + if not inp.source: + self = None + else: + if isinstance(inp.source, MutableSequence): + self = [] + for source in inp.source: + if isinstance(step, cwl.ScatterWorkflowStep): + scattered_source_type = utils.type_for_source( + parent, source + ) + if isinstance(scattered_source_type, list): + for stype in scattered_source_type: + self.append(example_input(stype.type_)) + else: + self.append(example_input(scattered_source_type.type_)) + else: + self.append( + example_input( + utils.type_for_source(parent, source.split("#")[-1]) + ) + 
) + else: + if isinstance(step, cwl.ScatterWorkflowStep): + scattered_source_type2 = utils.type_for_source( + parent, inp.source + ) + if isinstance(scattered_source_type2, list): + self = example_input(scattered_source_type2[0].type_) + else: + self = example_input(scattered_source_type2.type_) + else: + self = example_input( + utils.type_for_source(parent, inp.source.split("#")[-1]) + ) + expression = get_expression(inp.valueFrom, inputs, self) + if expression: + modified = True + etool_id = "_expression_{}_{}".format(step_id, inp.id.split("/")[-1]) + target = get_input_for_id(inp.id, original_process) + if not target: + raise WorkflowException("target not found") + input_source_id = None + source_type: Optional[ + Union[list[cwl.WorkflowInputParameter], cwl.WorkflowInputParameter] + ] = None + if inp.source: + if isinstance(inp.source, MutableSequence): + input_source_id = [] + source_types: list[cwl.WorkflowInputParameter] = [] + for source in inp.source: + source_id = source.split("#")[-1] + input_source_id.append(source_id) + temp_type = utils.type_for_source( + step.run, source_id, parent + ) + if isinstance(temp_type, list): + for ttype in temp_type: + if ttype not in source_types: + source_types.append(ttype) + else: + if temp_type not in source_types: + source_types.append(temp_type) + source_type = cwl.WorkflowInputParameter( + id=None, + type_=cwl.ArraySchema(source_types, "array"), + ) + else: + input_source_id = inp.source.split("#")[-1] + source_type = utils.param_for_source_id( + step.run, input_source_id, parent + ) + # target.id = target.id.split('#')[-1] + if isinstance(original_process, cwl.ExpressionTool): + found_JSReq = False + reqs: list[cwl.ProcessRequirement] = [] + if original_process.hints: + reqs.extend(original_process.hints) + if original_process.requirements: + reqs.extend(original_process.requirements) + for req in reqs: + if isinstance(req, cwl.InlineJavascriptRequirement): + found_JSReq = True + if not found_JSReq: + if not 
step.run.requirements: + step.run.requirements = [] + expr_lib = find_expressionLib([parent]) + step.run.requirements.append( + cwl.InlineJavascriptRequirement(expr_lib) + ) + replace_step_valueFrom_expr_with_etool( + expression, + etool_id, + parent, + target, + step, + inp, + original_process, + original_step_ins, + input_source_id, + replace_etool, + source_type, + ) + inp.valueFrom = None + inp.source = f"{etool_id}/result" + if step.when: + expression = get_expression(string=step.when, inputs=inputs, self=None) + if expression: + modified = True + replace_step_when_expr_with_etool( + expression, parent, step, original_step_ins, replace_etool + ) + + # TODO: skip or special process for sub workflows? + process_modified = process_level_reqs( + original_process, + step, + parent, + replace_etool, + skip_command_line1, + skip_command_line2, + ) + if process_modified: + modified = True + if isinstance(original_process, cwl.CommandLineTool): + clt_modified = traverse_CommandLineTool( + original_process, + parent, + step, + replace_etool, + skip_command_line1, + skip_command_line2, + ) + if clt_modified: + modified = True + return modified + + +def workflow_step_to_WorkflowInputParameters( + step_ins: list[cwl.WorkflowStepInput], parent: cwl.Workflow, except_in_id: str +) -> list[cwl.WorkflowInputParameter]: + """Create WorkflowInputParameters to match the given WorkflowStep inputs.""" + params = [] + for inp in step_ins: + if not inp.id: + continue + inp_id = inp.id.split("#")[-1].split("/")[-1] + if inp.source and inp_id != except_in_id: + param = copy.deepcopy( + utils.param_for_source_id(parent, sourcenames=inp.source) + ) + if isinstance(param, list): + for p in param: + p.id = inp_id + p.type_ = clean_type_ids(p.type_) + params.append(p) + else: + param.id = inp_id + param.type_ = clean_type_ids(param.type_) + params.append(param) + return params + + +def replace_step_valueFrom_expr_with_etool( + expr: str, + name: str, + workflow: cwl.Workflow, + target: 
Union[cwl.CommandInputParameter, cwl.WorkflowInputParameter], + step: cwl.WorkflowStep, + step_inp: cwl.WorkflowStepInput, + original_process: Union[cwl.CommandLineTool, cwl.ExpressionTool], + original_step_ins: list[cwl.WorkflowStepInput], + source: Optional[Union[str, list[str]]], + replace_etool: bool, + source_type: Optional[ + Union[cwl.WorkflowInputParameter, list[cwl.WorkflowInputParameter]] + ] = None, +) -> None: + """Replace a WorkflowStep level 'valueFrom' expression with a sibling ExpressionTool step.""" + if not step_inp.id: + raise WorkflowException(f"Missing id in {step_inp}.") + step_inp_id = step_inp.id.split("/")[-1] + etool_inputs = workflow_step_to_WorkflowInputParameters( + original_step_ins, workflow, step_inp_id + ) + if source: + source_param = cwl.WorkflowInputParameter(id="self", type_="Any") + # TODO: would be nicer to derive a proper type; but in the face of linkMerge, this is easier for now + etool_inputs.append(source_param) + temp_etool = generate_etool_from_expr2( + expr, + target, + etool_inputs, + "self" if source else None, + original_process, + [workflow, step], + ) + if replace_etool: + processes: list[ + Union[ + cwl.Workflow, cwl.CommandLineTool, cwl.ExpressionTool, cwl.WorkflowStep + ] + ] = [ + workflow, + step, + ] + cltool = etool_to_cltool(temp_etool, find_expressionLib(processes)) + etool: Union[cwl.ExpressionTool, cwl.CommandLineTool] = cltool + else: + etool = temp_etool + wf_step_inputs = copy.deepcopy(original_step_ins) + if source: + wf_step_inputs.append(cwl.WorkflowStepInput(id="self", source=step_inp.source)) + for wf_step_input in wf_step_inputs: + if not wf_step_input.id: + continue + wf_step_input.id = wf_step_input.id.split("/")[-1] + if wf_step_input.valueFrom: + wf_step_input.valueFrom = None + if wf_step_input.source: + if isinstance(wf_step_input.source, MutableSequence): + for index, inp_source in enumerate(wf_step_input.source): + wf_step_input.source[index] = inp_source.split("#")[-1] + else: + 
wf_step_input.source = wf_step_input.source.split("#")[-1] + wf_step_inputs[:] = [ + x + for x in wf_step_inputs + if x.id and not (x.id.startswith("_") or x.id.endswith(step_inp_id)) + ] + if isinstance(step, cwl.ScatterWorkflowStep): + scatter = copy.deepcopy(step.scatter) + if isinstance(scatter, str): + scatter = [scatter] + if isinstance(scatter, MutableSequence): + for index, entry in enumerate(scatter): + scatter[index] = entry.split("/")[-1] + if scatter and step_inp_id in scatter: + scatter = ["self"] + # do we still need to scatter? + else: + scatter = None + workflow.steps.append( + cwl.ScatterWorkflowStep( + id=name, + in_=wf_step_inputs, + out=[cwl.WorkflowStepOutput("result")], + run=etool, + scatter=scatter, + scatterMethod=step.scatterMethod, + ) + ) + else: + workflow.steps.append( + cwl.WorkflowStep( + id=name, + in_=wf_step_inputs, + out=[cwl.WorkflowStepOutput("result")], + run=etool, + ) + ) + + +def replace_step_when_expr_with_etool( + expr: str, + workflow: cwl.Workflow, + step: cwl.WorkflowStep, + original_step_ins: list[cwl.WorkflowStepInput], + replace_etool: bool, +) -> None: + """Replace a WorkflowStep level 'when' expression with a sibling ExpressionTool step.""" + if not step.id: + raise WorkflowException(f"Missing id from {step}.") + etool_id = "_when_expression_{}".format(step.id.split("#")[-1]) + etool_inputs = workflow_step_to_WorkflowInputParameters( + original_step_ins, workflow, "" + ) + temp_etool = generate_etool_from_expr2( + expr, + cwl.WorkflowInputParameter(id=None, type_="boolean"), + etool_inputs, + None, + None, + [workflow, step], + ) + if replace_etool: + processes: list[ + Union[ + cwl.Workflow, cwl.CommandLineTool, cwl.ExpressionTool, cwl.WorkflowStep + ] + ] = [ + workflow, + step, + ] + cltool = etool_to_cltool(temp_etool, find_expressionLib(processes)) + etool: Union[cwl.ExpressionTool, cwl.CommandLineTool] = cltool + else: + etool = temp_etool + wf_step_inputs = copy.deepcopy(original_step_ins) + for 
wf_step_input in wf_step_inputs:
+        if not wf_step_input.id:
+            continue
+        wf_step_input.id = wf_step_input.id.split("/")[-1]
+        if wf_step_input.source:
+            if isinstance(wf_step_input.source, MutableSequence):
+                for index, inp_source in enumerate(wf_step_input.source):
+                    wf_step_input.source[index] = inp_source.split("#")[-1]
+            else:
+                wf_step_input.source = wf_step_input.source.split("#")[-1]
+    wf_step_inputs[:] = [x for x in wf_step_inputs if x.id and not x.id.startswith("_")]
+    if isinstance(step, cwl.ScatterWorkflowStep):
+        scatter = copy.deepcopy(step.scatter)
+        if isinstance(scatter, str):
+            scatter = [scatter]
+        if isinstance(scatter, MutableSequence):
+            for index, entry in enumerate(scatter):
+                scatter[index] = entry.split("/")[-1]
+        # scatter entries were normalized to short ids above; keep that list
+        workflow.steps.append(
+            cwl.ScatterWorkflowStep(
+                id=etool_id,
+                in_=wf_step_inputs,
+                out=[cwl.WorkflowStepOutput("result")],
+                run=etool,
+                scatter=scatter,
+                scatterMethod=step.scatterMethod,
+            )
+        )
+    else:
+        workflow.steps.append(
+            cwl.WorkflowStep(
+                id=etool_id,
+                in_=wf_step_inputs,
+                out=[cwl.WorkflowStepOutput("result")],
+                run=etool,
+            )
+        )
+    step.when = "$(inputs._when)"
+    step.in_.append(cwl.WorkflowStepInput(id="_when", source=f"{etool_id}/result"))
+
+
+def traverse_workflow(
+    workflow: cwl.Workflow,
+    replace_etool: bool,
+    skip_command_line1: bool,
+    skip_command_line2: bool,
+) -> tuple[cwl.Workflow, bool]:
+    """Traverse a workflow, processing each step."""
+    modified = False
+    for index, step in enumerate(workflow.steps):
+        if isinstance(step.run, cwl.ExpressionTool) and replace_etool:
+            workflow.steps[index].run = etool_to_cltool(step.run)
+            modified = True
+        else:
+            step_modified = load_step(
+                step, replace_etool, skip_command_line1, skip_command_line2
+            )
+            if step_modified:
+                modified = True
+    for step in workflow.steps:
+        if not step.id.startswith("_expression"):
+            step_modified = traverse_step(
+                step, workflow, replace_etool, skip_command_line1, skip_command_line2
+            )
+            if
step_modified: + modified = True + if process_workflow_inputs_and_outputs(workflow, replace_etool): + modified = True + if process_workflow_reqs_and_hints(workflow, replace_etool): + modified = True + if workflow.requirements: + workflow.requirements[:] = [ + x + for x in workflow.requirements + if not isinstance( + x, (cwl.InlineJavascriptRequirement, cwl.StepInputExpressionRequirement) + ) + ] + else: + workflow.requirements = None + return workflow, modified diff --git a/cwl_utils/parser/__init__.py b/cwl_utils/parser/__init__.py index 3d0868e6..0581d26b 100644 --- a/cwl_utils/parser/__init__.py +++ b/cwl_utils/parser/__init__.py @@ -1,7 +1,6 @@ # SPDX-License-Identifier: Apache-2.0 import os -from abc import ABC from collections.abc import MutableMapping, MutableSequence from pathlib import Path from typing import Any, Optional, Union, cast @@ -11,218 +10,306 @@ from schema_salad.utils import yaml_no_ts from ..errors import GraphTargetMissingException -from . import cwl_v1_0, cwl_v1_1, cwl_v1_2 - - -class NoType(ABC): - pass - +from . 
import cwl_v1_0, cwl_v1_1, cwl_v1_2, cwl_v1_3 LoadingOptions = Union[ - cwl_v1_0.LoadingOptions, cwl_v1_1.LoadingOptions, cwl_v1_2.LoadingOptions + cwl_v1_0.LoadingOptions, + cwl_v1_1.LoadingOptions, + cwl_v1_2.LoadingOptions, + cwl_v1_3.LoadingOptions, ] """Type union for a CWL v1.x LoadingOptions object.""" -Saveable = Union[cwl_v1_0.Saveable, cwl_v1_1.Saveable, cwl_v1_2.Saveable] +Saveable = Union[ + cwl_v1_0.Saveable, cwl_v1_1.Saveable, cwl_v1_2.Saveable, cwl_v1_3.Saveable +] """Type union for a CWL v1.x Saveable object.""" InputParameter = Union[ - cwl_v1_0.InputParameter, cwl_v1_1.InputParameter, cwl_v1_2.InputParameter + cwl_v1_0.InputParameter, + cwl_v1_1.InputParameter, + cwl_v1_2.InputParameter, + cwl_v1_3.InputParameter, ] """Type union for a CWL v1.x InputEnumSchema object.""" InputRecordField = Union[ cwl_v1_0.InputRecordField, cwl_v1_1.InputRecordField, cwl_v1_2.InputRecordField, + cwl_v1_3.InputRecordField, ] """Type union for a CWL v1.x InputRecordSchema object.""" -InputSchema = Union[cwl_v1_0.InputSchema, cwl_v1_1.InputSchema, cwl_v1_2.InputSchema] +InputSchema = Union[ + cwl_v1_0.InputSchema, + cwl_v1_1.InputSchema, + cwl_v1_2.InputSchema, + cwl_v1_3.InputSchema, +] """Type union for a CWL v1.x InputSchema object.""" OutputParameter = Union[ - cwl_v1_0.OutputParameter, cwl_v1_1.OutputParameter, cwl_v1_2.OutputParameter + cwl_v1_0.OutputParameter, + cwl_v1_1.OutputParameter, + cwl_v1_2.OutputParameter, + cwl_v1_3.OutputParameter, ] """Type union for a CWL v1.x OutputParameter object.""" OutputArraySchema = Union[ cwl_v1_0.OutputArraySchema, cwl_v1_1.OutputArraySchema, cwl_v1_2.OutputArraySchema, + cwl_v1_3.OutputArraySchema, ] """Type union for a CWL v1.x OutputArraySchema object.""" OutputEnumSchema = Union[ cwl_v1_0.OutputEnumSchema, cwl_v1_1.OutputEnumSchema, cwl_v1_2.OutputEnumSchema, + cwl_v1_3.OutputEnumSchema, ] """Type union for a CWL v1.x OutputEnumSchema object.""" OutputRecordField = Union[ cwl_v1_0.OutputRecordField, 
cwl_v1_1.OutputRecordField, cwl_v1_2.OutputRecordField, + cwl_v1_3.OutputRecordField, ] """Type union for a CWL v1.x OutputRecordField object.""" OutputRecordSchema = Union[ cwl_v1_0.OutputRecordSchema, cwl_v1_1.OutputRecordSchema, cwl_v1_2.OutputRecordSchema, + cwl_v1_3.OutputRecordSchema, ] """Type union for a CWL v1.x OutputRecordSchema object.""" OutputSchema = Union[ - cwl_v1_0.OutputSchema, cwl_v1_1.OutputSchema, cwl_v1_2.OutputSchema + cwl_v1_0.OutputSchema, + cwl_v1_1.OutputSchema, + cwl_v1_2.OutputSchema, + cwl_v1_3.OutputSchema, ] """Type union for a CWL v1.x OutputSchema object.""" -Workflow = Union[cwl_v1_0.Workflow, cwl_v1_1.Workflow, cwl_v1_2.Workflow] -WorkflowTypes = (cwl_v1_0.Workflow, cwl_v1_1.Workflow, cwl_v1_2.Workflow) +Workflow = Union[ + cwl_v1_0.Workflow, cwl_v1_1.Workflow, cwl_v1_2.Workflow, cwl_v1_3.Workflow +] +WorkflowTypes = ( + cwl_v1_0.Workflow, + cwl_v1_1.Workflow, + cwl_v1_2.Workflow, + cwl_v1_3.Workflow, +) """Type union for a CWL v1.x Workflow object.""" WorkflowInputParameter = Union[ cwl_v1_0.InputParameter, cwl_v1_1.WorkflowInputParameter, cwl_v1_2.WorkflowInputParameter, + cwl_v1_3.WorkflowInputParameter, ] """Type union for a CWL v1.x WorkflowInputParameter object.""" WorkflowOutputParameter = Union[ cwl_v1_0.WorkflowOutputParameter, cwl_v1_1.WorkflowOutputParameter, cwl_v1_2.WorkflowOutputParameter, + cwl_v1_3.WorkflowOutputParameter, ] """Type union for a CWL v1.x WorkflowOutputParameter object.""" WorkflowStep = Union[ - cwl_v1_0.WorkflowStep, cwl_v1_1.WorkflowStep, cwl_v1_2.WorkflowStep + cwl_v1_0.WorkflowStep, + cwl_v1_1.WorkflowStep, + cwl_v1_2.WorkflowStep, + cwl_v1_3.WorkflowStep, + cwl_v1_3.LoopWorkflowStep, + cwl_v1_3.ScatterWorkflowStep, ] """Type union for a CWL v1.x WorkflowStep object.""" ScatterWorkflowStep = Union[ cwl_v1_0.WorkflowStep, cwl_v1_1.WorkflowStep, cwl_v1_2.WorkflowStep, + cwl_v1_3.ScatterWorkflowStep, ] """Type union for a CWL v1.x ScatterWorkflowStep object.""" -LoopWorkflowStep = NoType 
+LoopWorkflowStep = Union[cwl_v1_3.LoopWorkflowStep] """Type union for a CWL v1.x LoopWorkflowStep object.""" WorkflowStepInput = Union[ - cwl_v1_0.WorkflowStepInput, cwl_v1_1.WorkflowStepInput, cwl_v1_2.WorkflowStepInput + cwl_v1_0.WorkflowStepInput, + cwl_v1_1.WorkflowStepInput, + cwl_v1_2.WorkflowStepInput, + cwl_v1_3.WorkflowStepInput, ] """Type union for a CWL v1.x WorkflowStepInput object.""" WorkflowStepOutput = Union[ cwl_v1_0.WorkflowStepOutput, cwl_v1_1.WorkflowStepOutput, cwl_v1_2.WorkflowStepOutput, + cwl_v1_3.WorkflowStepOutput, ] """Type union for a CWL v1.x WorkflowStepOutput object.""" CommandLineTool = Union[ - cwl_v1_0.CommandLineTool, cwl_v1_1.CommandLineTool, cwl_v1_2.CommandLineTool + cwl_v1_0.CommandLineTool, + cwl_v1_1.CommandLineTool, + cwl_v1_2.CommandLineTool, + cwl_v1_3.CommandLineTool, ] CommandLineToolTypes = ( cwl_v1_0.CommandLineTool, cwl_v1_1.CommandLineTool, cwl_v1_2.CommandLineTool, + cwl_v1_3.CommandLineTool, ) """Type union for a CWL v1.x CommandLineTool object.""" CommandLineBinding = Union[ cwl_v1_0.CommandLineBinding, cwl_v1_1.CommandLineBinding, cwl_v1_2.CommandLineBinding, + cwl_v1_3.CommandLineBinding, ] """Type union for a CWL v1.x CommandLineBinding object.""" CommandOutputBinding = Union[ cwl_v1_0.CommandOutputBinding, cwl_v1_1.CommandOutputBinding, cwl_v1_2.CommandOutputBinding, + cwl_v1_3.CommandOutputBinding, ] """Type union for a CWL v1.x CommandOutputBinding object.""" CommandInputParameter = Union[ cwl_v1_0.CommandInputParameter, cwl_v1_1.CommandInputParameter, cwl_v1_2.CommandInputParameter, + cwl_v1_3.CommandInputParameter, ] """Type union for a CWL v1.x CommandInputParameter object.""" CommandOutputParameter = Union[ cwl_v1_0.CommandOutputParameter, cwl_v1_1.CommandOutputParameter, cwl_v1_2.CommandOutputParameter, + cwl_v1_3.CommandOutputParameter, ] """Type union for a CWL v1.x CommandOutputParameter object.""" ExpressionTool = Union[ - cwl_v1_0.ExpressionTool, cwl_v1_1.ExpressionTool, cwl_v1_2.ExpressionTool + 
cwl_v1_0.ExpressionTool, + cwl_v1_1.ExpressionTool, + cwl_v1_2.ExpressionTool, + cwl_v1_3.ExpressionTool, ] """Type union for a CWL v1.x ExpressionTool object.""" ExpressionToolOutputParameter = Union[ cwl_v1_0.ExpressionToolOutputParameter, cwl_v1_1.ExpressionToolOutputParameter, cwl_v1_2.ExpressionToolOutputParameter, + cwl_v1_3.ExpressionToolOutputParameter, ] """Type union for a CWL v1.x ExpressionToolOutputParameter object.""" DockerRequirement = Union[ - cwl_v1_0.DockerRequirement, cwl_v1_1.DockerRequirement, cwl_v1_2.DockerRequirement + cwl_v1_0.DockerRequirement, + cwl_v1_1.DockerRequirement, + cwl_v1_2.DockerRequirement, + cwl_v1_3.DockerRequirement, ] DockerRequirementTypes = ( cwl_v1_0.DockerRequirement, cwl_v1_1.DockerRequirement, cwl_v1_2.DockerRequirement, + cwl_v1_3.DockerRequirement, ) """Type union for a CWL v1.x DockerRequirement object.""" -Process = Union[Workflow, CommandLineTool, ExpressionTool, cwl_v1_2.Operation] +Operation = Union[cwl_v1_2.Operation, cwl_v1_3.Operation] +"""Type Union for a CWL v1.x Operation object.""" +Process = Union[Workflow, CommandLineTool, ExpressionTool, Operation] """Type Union for a CWL v1.x Process object.""" ProcessRequirement = Union[ cwl_v1_0.ProcessRequirement, cwl_v1_1.ProcessRequirement, cwl_v1_2.ProcessRequirement, + cwl_v1_3.ProcessRequirement, ] """Type Union for a CWL v1.x ProcessRequirement object.""" ProcessRequirementTypes = ( cwl_v1_0.ProcessRequirement, cwl_v1_1.ProcessRequirement, cwl_v1_2.ProcessRequirement, + cwl_v1_3.ProcessRequirement, ) SoftwareRequirement = Union[ cwl_v1_0.SoftwareRequirement, cwl_v1_1.SoftwareRequirement, cwl_v1_2.SoftwareRequirement, + cwl_v1_3.SoftwareRequirement, ] SoftwareRequirementTypes = ( cwl_v1_0.SoftwareRequirement, cwl_v1_1.SoftwareRequirement, cwl_v1_2.SoftwareRequirement, + cwl_v1_3.SoftwareRequirement, ) """Type union for a CWL v1.x SoftwareRequirement object.""" -ArraySchema = Union[cwl_v1_0.ArraySchema, cwl_v1_1.ArraySchema, cwl_v1_2.ArraySchema] 
+ArraySchema = Union[ + cwl_v1_0.ArraySchema, + cwl_v1_1.ArraySchema, + cwl_v1_2.ArraySchema, + cwl_v1_3.ArraySchema, +] InputArraySchema = Union[ - cwl_v1_0.InputArraySchema, cwl_v1_1.InputArraySchema, cwl_v1_2.InputArraySchema + cwl_v1_0.InputArraySchema, + cwl_v1_1.InputArraySchema, + cwl_v1_2.InputArraySchema, + cwl_v1_3.InputArraySchema, ] InputArraySchemaTypes = ( cwl_v1_0.InputArraySchema, cwl_v1_1.InputArraySchema, cwl_v1_2.InputArraySchema, + cwl_v1_3.InputArraySchema, ) """Type Union for a CWL v1.x ArraySchema object.""" -EnumSchema = Union[cwl_v1_0.EnumSchema, cwl_v1_1.EnumSchema, cwl_v1_2.EnumSchema] +EnumSchema = Union[ + cwl_v1_0.EnumSchema, cwl_v1_1.EnumSchema, cwl_v1_2.EnumSchema, cwl_v1_3.EnumSchema +] InputEnumSchema = Union[ - cwl_v1_0.InputEnumSchema, cwl_v1_1.InputEnumSchema, cwl_v1_2.InputEnumSchema + cwl_v1_0.InputEnumSchema, + cwl_v1_1.InputEnumSchema, + cwl_v1_2.InputEnumSchema, + cwl_v1_3.InputEnumSchema, ] InputEnumSchemaTypes = ( cwl_v1_0.InputEnumSchema, cwl_v1_1.InputEnumSchema, cwl_v1_2.InputEnumSchema, + cwl_v1_3.InputEnumSchema, ) """Type Union for a CWL v1.x EnumSchema object.""" RecordSchema = Union[ - cwl_v1_0.RecordSchema, cwl_v1_1.RecordSchema, cwl_v1_2.RecordSchema + cwl_v1_0.RecordSchema, + cwl_v1_1.RecordSchema, + cwl_v1_2.RecordSchema, + cwl_v1_3.RecordSchema, ] InputRecordSchema = Union[ - cwl_v1_0.InputRecordSchema, cwl_v1_1.InputRecordSchema, cwl_v1_2.InputRecordSchema + cwl_v1_0.InputRecordSchema, + cwl_v1_1.InputRecordSchema, + cwl_v1_2.InputRecordSchema, + cwl_v1_3.InputRecordSchema, ] InputRecordSchemaTypes = ( cwl_v1_0.InputRecordSchema, cwl_v1_1.InputRecordSchema, cwl_v1_2.InputRecordSchema, + cwl_v1_3.InputRecordSchema, ) """Type Union for a CWL v1.x RecordSchema object.""" -File = Union[cwl_v1_0.File, cwl_v1_1.File, cwl_v1_2.File] +File = Union[cwl_v1_0.File, cwl_v1_1.File, cwl_v1_2.File, cwl_v1_3.File] """Type Union for a CWL v1.x File object.""" -SecondaryFileSchema = Union[cwl_v1_1.SecondaryFileSchema, 
cwl_v1_2.SecondaryFileSchema] +SecondaryFileSchema = Union[ + cwl_v1_1.SecondaryFileSchema, + cwl_v1_2.SecondaryFileSchema, + cwl_v1_3.SecondaryFileSchema, +] """Type Union for a CWL v1.x SecondaryFileSchema object.""" -Directory = Union[cwl_v1_0.Directory, cwl_v1_1.Directory, cwl_v1_2.Directory] +Directory = Union[ + cwl_v1_0.Directory, cwl_v1_1.Directory, cwl_v1_2.Directory, cwl_v1_3.Directory +] """Type Union for a CWL v1.x Directory object.""" -Dirent = Union[cwl_v1_0.Dirent, cwl_v1_1.Dirent, cwl_v1_2.Dirent] +Dirent = Union[cwl_v1_0.Dirent, cwl_v1_1.Dirent, cwl_v1_2.Dirent, cwl_v1_3.Dirent] """Type Union for a CWL v1.x Dirent object.""" -_Loader = Union[cwl_v1_0._Loader, cwl_v1_1._Loader, cwl_v1_2._Loader] +_Loader = Union[cwl_v1_0._Loader, cwl_v1_1._Loader, cwl_v1_2._Loader, cwl_v1_3._Loader] """Type union for a CWL v1.x _Loader.""" @@ -285,8 +372,12 @@ def load_document_by_uri( loadingOptions = cwl_v1_2.LoadingOptions( fileuri=real_uri, baseuri=base_uri, copyfrom=loadingOptions ) + elif isinstance(loadingOptions, cwl_v1_3.LoadingOptions): + loadingOptions = cwl_v1_3.LoadingOptions( + fileuri=real_uri, baseuri=base_uri, copyfrom=loadingOptions + ) elif loadingOptions is None: - loadingOptions = cwl_v1_2.LoadingOptions(fileuri=real_uri, baseuri=base_uri) + loadingOptions = cwl_v1_3.LoadingOptions(fileuri=real_uri, baseuri=base_uri) else: raise ValidationException( f"Unsupported loadingOptions type: {type(loadingOptions)}" @@ -348,6 +439,10 @@ def load_document_by_yaml( result = cwl_v1_2.load_document_by_yaml( yaml, uri, cast(Optional[cwl_v1_2.LoadingOptions], loadingOptions) ) + elif version == "v1.3.0-dev1": + result = cwl_v1_3.load_document_by_yaml( + yaml, uri, cast(Optional[cwl_v1_3.LoadingOptions], loadingOptions) + ) elif version is None: raise ValidationException("could not get the cwlVersion") else: @@ -376,6 +471,7 @@ def save( isinstance(val, cwl_v1_0.Saveable) or isinstance(val, cwl_v1_1.Saveable) or isinstance(val, cwl_v1_2.Saveable) + or 
isinstance(val, cwl_v1_3.Saveable) ): return val.save(top=top, base_url=base_url, relative_uris=relative_uris) if isinstance(val, MutableSequence): @@ -408,6 +504,7 @@ def is_process(v: Any) -> bool: isinstance(v, cwl_v1_0.Process) or isinstance(v, cwl_v1_1.Process) or isinstance(v, cwl_v1_2.Process) + or isinstance(v, cwl_v1_3.Process) ) diff --git a/cwl_utils/parser/cwl_v1_1_utils.py b/cwl_utils/parser/cwl_v1_1_utils.py index 2c2cce2f..2ef4a67d 100644 --- a/cwl_utils/parser/cwl_v1_1_utils.py +++ b/cwl_utils/parser/cwl_v1_1_utils.py @@ -537,27 +537,26 @@ def param_for_source_id( ): params.append(output) if scatter_context is not None: - if scatter_context is not None: - if isinstance(step.scatter, str): - scatter_context.append( - ( - 1, - step.scatterMethod - or "dotproduct", - ) + if isinstance(step.scatter, str): + scatter_context.append( + ( + 1, + step.scatterMethod + or "dotproduct", ) - elif isinstance( - step.scatter, MutableSequence - ): - scatter_context.append( - ( - len(step.scatter), - step.scatterMethod - or "dotproduct", - ) + ) + elif isinstance( + step.scatter, MutableSequence + ): + scatter_context.append( + ( + len(step.scatter), + step.scatterMethod + or "dotproduct", ) - else: - scatter_context.append(None) + ) + else: + scatter_context.append(None) if len(params) == 1: return params[0] elif len(params) > 1: diff --git a/cwl_utils/parser/cwl_v1_2_utils.py b/cwl_utils/parser/cwl_v1_2_utils.py index 544c6080..3e193299 100644 --- a/cwl_utils/parser/cwl_v1_2_utils.py +++ b/cwl_utils/parser/cwl_v1_2_utils.py @@ -628,27 +628,26 @@ def param_for_source_id( ): params.append(output) if scatter_context is not None: - if scatter_context is not None: - if isinstance(step.scatter, str): - scatter_context.append( - ( - 1, - step.scatterMethod - or "dotproduct", - ) + if isinstance(step.scatter, str): + scatter_context.append( + ( + 1, + step.scatterMethod + or "dotproduct", ) - elif isinstance( - step.scatter, MutableSequence - ): - scatter_context.append( 
- ( - len(step.scatter), - step.scatterMethod - or "dotproduct", - ) + ) + elif isinstance( + step.scatter, MutableSequence + ): + scatter_context.append( + ( + len(step.scatter), + step.scatterMethod + or "dotproduct", ) - else: - scatter_context.append(None) + ) + else: + scatter_context.append(None) if len(params) == 1: return params[0] elif len(params) > 1: diff --git a/cwl_utils/parser/cwl_v1_3.py b/cwl_utils/parser/cwl_v1_3.py new file mode 100644 index 00000000..be5f395a --- /dev/null +++ b/cwl_utils/parser/cwl_v1_3.py @@ -0,0 +1,30425 @@ +# +# This file was autogenerated using schema-salad-tool --codegen=python +# The code itself is released under the Apache 2.0 license and the help text is +# subject to the license of the original schema. + +import copy +import logging +import os +import pathlib +import tempfile +import uuid as _uuid__ # pylint: disable=unused-import # noqa: F401 +import xml.sax # nosec +from abc import ABC, abstractmethod +from collections.abc import MutableMapping, MutableSequence, Sequence +from io import StringIO +from itertools import chain +from typing import Any, Optional, Union, cast +from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit +from urllib.request import pathname2url + +from rdflib import Graph +from rdflib.plugins.parsers.notation3 import BadSyntax +from ruamel.yaml.comments import CommentedMap + +from schema_salad.exceptions import SchemaSaladException, ValidationException +from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher +from schema_salad.sourceline import SourceLine, add_lc_filename +from schema_salad.utils import CacheType, yaml_no_ts # requires schema-salad v8.2+ + +_vocab: dict[str, str] = {} +_rvocab: dict[str, str] = {} + +_logger = logging.getLogger("salad") + + +IdxType = MutableMapping[str, tuple[Any, "LoadingOptions"]] + + +class LoadingOptions: + idx: IdxType + fileuri: Optional[str] + baseuri: str + namespaces: MutableMapping[str, str] + schemas: 
MutableSequence[str] + original_doc: Optional[Any] + addl_metadata: MutableMapping[str, Any] + fetcher: Fetcher + vocab: dict[str, str] + rvocab: dict[str, str] + cache: CacheType + imports: list[str] + includes: list[str] + no_link_check: Optional[bool] + container: Optional[str] + + def __init__( + self, + fetcher: Optional[Fetcher] = None, + namespaces: Optional[dict[str, str]] = None, + schemas: Optional[list[str]] = None, + fileuri: Optional[str] = None, + copyfrom: Optional["LoadingOptions"] = None, + original_doc: Optional[Any] = None, + addl_metadata: Optional[dict[str, str]] = None, + baseuri: Optional[str] = None, + idx: Optional[IdxType] = None, + imports: Optional[list[str]] = None, + includes: Optional[list[str]] = None, + no_link_check: Optional[bool] = None, + container: Optional[str] = None, + ) -> None: + """Create a LoadingOptions object.""" + self.original_doc = original_doc + + if idx is not None: + self.idx = idx + else: + self.idx = copyfrom.idx if copyfrom is not None else {} + + if fileuri is not None: + self.fileuri = fileuri + else: + self.fileuri = copyfrom.fileuri if copyfrom is not None else None + + if baseuri is not None: + self.baseuri = baseuri + else: + self.baseuri = copyfrom.baseuri if copyfrom is not None else "" + + if namespaces is not None: + self.namespaces = namespaces + else: + self.namespaces = copyfrom.namespaces if copyfrom is not None else {} + + if schemas is not None: + self.schemas = schemas + else: + self.schemas = copyfrom.schemas if copyfrom is not None else [] + + if addl_metadata is not None: + self.addl_metadata = addl_metadata + else: + self.addl_metadata = copyfrom.addl_metadata if copyfrom is not None else {} + + if imports is not None: + self.imports = imports + else: + self.imports = copyfrom.imports if copyfrom is not None else [] + + if includes is not None: + self.includes = includes + else: + self.includes = copyfrom.includes if copyfrom is not None else [] + + if no_link_check is not None: + 
self.no_link_check = no_link_check + else: + self.no_link_check = copyfrom.no_link_check if copyfrom is not None else False + + if container is not None: + self.container = container + else: + self.container = copyfrom.container if copyfrom is not None else None + + if fetcher is not None: + self.fetcher = fetcher + elif copyfrom is not None: + self.fetcher = copyfrom.fetcher + else: + import requests + from cachecontrol.caches import SeparateBodyFileCache + from cachecontrol.wrapper import CacheControl + + root = pathlib.Path(os.environ.get("HOME", tempfile.gettempdir())) + session = CacheControl( + requests.Session(), + cache=SeparateBodyFileCache(root / ".cache" / "salad"), + ) + self.fetcher: Fetcher = DefaultFetcher({}, session) + + self.cache = self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {} + + self.vocab = _vocab + self.rvocab = _rvocab + + if self.namespaces is not None: + self.vocab = self.vocab.copy() + self.rvocab = self.rvocab.copy() + for k, v in self.namespaces.items(): + self.vocab[k] = v + self.rvocab[v] = k + + @property + def graph(self) -> Graph: + """Generate a merged rdflib.Graph from all entries in self.schemas.""" + graph = Graph() + if not self.schemas: + return graph + key = str(hash(tuple(self.schemas))) + if key in self.cache: + return cast(Graph, self.cache[key]) + for schema in self.schemas: + fetchurl = ( + self.fetcher.urljoin(self.fileuri, schema) + if self.fileuri is not None + else pathlib.Path(schema).resolve().as_uri() + ) + if fetchurl not in self.cache or self.cache[fetchurl] is True: + _logger.debug("Getting external schema %s", fetchurl) + try: + content = self.fetcher.fetch_text(fetchurl) + except Exception as e: + _logger.warning("Could not load extension schema %s: %s", fetchurl, str(e)) + continue + newGraph = Graph() + err_msg = "unknown error" + for fmt in ["xml", "turtle"]: + try: + newGraph.parse(data=content, format=fmt, publicID=str(fetchurl)) + self.cache[fetchurl] = newGraph + graph 
+= newGraph + break + except (xml.sax.SAXParseException, TypeError, BadSyntax) as e: + err_msg = str(e) + else: + _logger.warning("Could not load extension schema %s: %s", fetchurl, err_msg) + self.cache[key] = graph + return graph + + +class Saveable(ABC): + """Mark classes than have a save() and fromDoc() function.""" + + @classmethod + @abstractmethod + def fromDoc( + cls, + _doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Saveable": + """Construct this object from the result of yaml.load().""" + + @abstractmethod + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + """Convert this object to a JSON/YAML friendly dictionary.""" + + +def load_field( + val: Union[str, dict[str, str]], + fieldtype: "_Loader", + baseuri: str, + loadingOptions: LoadingOptions, + lc: Optional[list[Any]] = None, +) -> Any: + """Load field.""" + if isinstance(val, MutableMapping): + if "$import" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]) + result, metadata = _document_load_by_url( + fieldtype, + url, + loadingOptions, + ) + loadingOptions.imports.append(url) + return result + if "$include" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"]) + val = loadingOptions.fetcher.fetch_text(url) + loadingOptions.includes.append(url) + return fieldtype.load(val, baseuri, loadingOptions, lc=lc) + + +save_type = Optional[Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str]] + + +def extract_type(val_type: type[Any]) -> str: + """Take a type of value, and extracts the value as a string.""" + val_str = str(val_type) + return val_str.split("'")[1] + + +def convert_typing(val_type: str) 
-> str:
+    """Normalize type names to schema-salad types."""
+    if "None" in val_type:
+        return "null"
+    if "CommentedSeq" in val_type or "list" in val_type:
+        return "array"
+    if "CommentedMap" in val_type or "dict" in val_type:
+        return "object"
+    if "False" in val_type or "True" in val_type:
+        return "boolean"
+    return val_type
+
+
+def parse_errors(error_message: str) -> tuple[str, str, str]:
+    """Parse error messages from several loaders into one error message."""
+    if not error_message.startswith("Expected"):
+        return error_message, "", ""
+    vals = error_message.split("\n")
+    if len(vals) == 1:
+        return error_message, "", ""
+    types = set()
+    for val in vals:
+        individual_vals = val.split(" ")
+        if val == "":
+            continue
+        if individual_vals[1] == "one":
+            individual_vals = val.split("(")[1].split(",")
+            for t in individual_vals:
+                types.add(t.strip(" ").strip(")\n"))
+        elif individual_vals[2] == "<class":
+            types.add(individual_vals[3].strip(">").replace("'", ""))
+        elif individual_vals[0] == "Value":
+            types.add(individual_vals[-1].strip("."))
+        else:
+            types.add(individual_vals[1].replace(",", ""))
+    types = {val for val in types if val != "NoneType"}
+    if "str" in types:
+        types = {convert_typing(val) for val in types if "'" not in val}
+    to_print = ""
+    for val in types:
+        if "'" in val:
+            to_print = "value" if len(types) == 1 else "values"
+
+    if to_print == "":
+        to_print = "type" if len(types) == 1 else "types"
+
+    verb_tensage = "is" if len(types) == 1 else "are"
+
+    return str(types).replace("{", "(").replace("}", ")").replace("'", ""), to_print, verb_tensage
+
+
+def save(
+    val: Any,
+    top: bool = True,
+    base_url: str = "",
+    relative_uris: bool = True,
+) -> save_type:
+    if isinstance(val, Saveable):
+        return val.save(top=top, base_url=base_url, relative_uris=relative_uris)
+    if isinstance(val, MutableSequence):
+        return [save(v, top=False, base_url=base_url, relative_uris=relative_uris) for v in val]
+    if isinstance(val, MutableMapping):
+        newdict = {}
+        for key in val:
+            newdict[key] =
save(val[key], top=False, base_url=base_url, relative_uris=relative_uris) + return newdict + if val is None or isinstance(val, (int, float, bool, str)): + return val + raise Exception("Not Saveable: %s" % type(val)) + + +def save_with_metadata( + val: Any, + valLoadingOpts: LoadingOptions, + top: bool = True, + base_url: str = "", + relative_uris: bool = True, +) -> save_type: + """Save and set $namespaces, $schemas, $base and any other metadata fields at the top level.""" + saved_val = save(val, top, base_url, relative_uris) + newdict: MutableMapping[str, Any] = {} + if isinstance(saved_val, MutableSequence): + newdict = {"$graph": saved_val} + elif isinstance(saved_val, MutableMapping): + newdict = saved_val + + if valLoadingOpts.namespaces: + newdict["$namespaces"] = valLoadingOpts.namespaces + if valLoadingOpts.schemas: + newdict["$schemas"] = valLoadingOpts.schemas + if valLoadingOpts.baseuri: + newdict["$base"] = valLoadingOpts.baseuri + for k, v in valLoadingOpts.addl_metadata.items(): + if k not in newdict: + newdict[k] = v + + return newdict + + +def expand_url( + url: str, + base_url: str, + loadingOptions: LoadingOptions, + scoped_id: bool = False, + vocab_term: bool = False, + scoped_ref: Optional[int] = None, +) -> str: + if url in ("@id", "@type"): + return url + + if vocab_term and url in loadingOptions.vocab: + return url + + if bool(loadingOptions.vocab) and ":" in url: + prefix = url.split(":")[0] + if prefix in loadingOptions.vocab: + url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :] + + split = urlsplit(url) + + if ( + (bool(split.scheme) and split.scheme in loadingOptions.fetcher.supported_schemes()) + or url.startswith("$(") + or url.startswith("${") + ): + pass + elif scoped_id and not bool(split.fragment): + splitbase = urlsplit(base_url) + frg = "" + if bool(splitbase.fragment): + frg = splitbase.fragment + "/" + split.path + else: + frg = split.path + pt = splitbase.path if splitbase.path != "" else "/" + url = 
urlunsplit((splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg)) + elif scoped_ref is not None and not bool(split.fragment): + splitbase = urlsplit(base_url) + sp = splitbase.fragment.split("/") + n = scoped_ref + while n > 0 and len(sp) > 0: + sp.pop() + n -= 1 + sp.append(url) + url = urlunsplit( + ( + splitbase.scheme, + splitbase.netloc, + splitbase.path, + splitbase.query, + "/".join(sp), + ) + ) + else: + url = loadingOptions.fetcher.urljoin(base_url, url) + + if vocab_term: + split = urlsplit(url) + if bool(split.scheme): + if url in loadingOptions.rvocab: + return loadingOptions.rvocab[url] + else: + raise ValidationException(f"Term {url!r} not in vocabulary") + + return url + + +class _Loader: + def load( + self, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + lc: Optional[list[Any]] = None, + ) -> Any: + pass + + +class _AnyLoader(_Loader): + def load( + self, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + lc: Optional[list[Any]] = None, + ) -> Any: + if doc is not None: + return doc + raise ValidationException("Expected non-null") + + +class _PrimitiveLoader(_Loader): + def __init__(self, tp: Union[type, tuple[type[str], type[str]]]) -> None: + self.tp = tp + + def load( + self, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + lc: Optional[list[Any]] = None, + ) -> Any: + if not isinstance(doc, self.tp): + raise ValidationException(f"Expected a {self.tp} but got {doc.__class__.__name__}") + return doc + + def __repr__(self) -> str: + return str(self.tp) + + +class _ArrayLoader(_Loader): + def __init__(self, items: _Loader) -> None: + self.items = items + + def load( + self, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + lc: Optional[list[Any]] = None, + ) -> Any: + if not isinstance(doc, MutableSequence): + raise ValidationException( + f"Value is a 
{convert_typing(extract_type(type(doc)))}, " + f"but valid type for this field is an array." + ) + r: list[Any] = [] + errors: list[SchemaSaladException] = [] + fields: list[str] = [] + for i in range(0, len(doc)): + try: + lf = load_field( + doc[i], _UnionLoader([self, self.items]), baseuri, loadingOptions, lc=lc + ) + flatten = loadingOptions.container != "@list" + if flatten and isinstance(lf, MutableSequence): + r.extend(lf) + else: + r.append(lf) + + if isinstance(doc[i], CommentedMap): + if doc[i].get("id") is not None: + if doc[i].get("id") in fields: + errors.append( + ValidationException( + f"Duplicate field {doc[i].get('id')!r}", + SourceLine(doc[i], "id", str), + [], + ) + ) + else: + fields.append(doc[i].get("id")) + + except ValidationException as e: + e = ValidationException( + "array item is invalid because", SourceLine(doc, i, str), [e] + ) + errors.append(e) + if errors: + raise ValidationException("", None, errors) + return r + + def __repr__(self) -> str: + return f"array<{self.items}>" + + +class _MapLoader(_Loader): + def __init__( + self, + values: _Loader, + name: Optional[str] = None, + container: Optional[str] = None, + no_link_check: Optional[bool] = None, + ) -> None: + self.values = values + self.name = name + self.container = container + self.no_link_check = no_link_check + + def load( + self, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + lc: Optional[list[Any]] = None, + ) -> Any: + if not isinstance(doc, MutableMapping): + raise ValidationException(f"Expected a map, was {type(doc)}") + if self.container is not None or self.no_link_check is not None: + loadingOptions = LoadingOptions( + copyfrom=loadingOptions, container=self.container, no_link_check=self.no_link_check + ) + r: dict[str, Any] = {} + errors: list[SchemaSaladException] = [] + for k, v in doc.items(): + try: + lf = load_field(v, self.values, baseuri, loadingOptions, lc) + r[k] = lf + except ValidationException as e: + 
errors.append(e.with_sourceline(SourceLine(doc, k, str))) + if errors: + raise ValidationException("", None, errors) + return r + + def __repr__(self) -> str: + return self.name if self.name is not None else f"map" + + +class _EnumLoader(_Loader): + def __init__(self, symbols: Sequence[str], name: str) -> None: + self.symbols = symbols + self.name = name + + def load( + self, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + lc: Optional[list[Any]] = None, + ) -> Any: + if doc in self.symbols: + return doc + raise ValidationException(f"Expected one of {self.symbols}") + + def __repr__(self) -> str: + return self.name + + +class _SecondaryDSLLoader(_Loader): + def __init__(self, inner: _Loader) -> None: + self.inner = inner + + def load( + self, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + lc: Optional[list[Any]] = None, + ) -> Any: + r: list[dict[str, Any]] = [] + if isinstance(doc, MutableSequence): + for d in doc: + if isinstance(d, str): + if d.endswith("?"): + r.append({"pattern": d[:-1], "required": False}) + else: + r.append({"pattern": d}) + elif isinstance(d, dict): + new_dict: dict[str, Any] = {} + dict_copy = copy.deepcopy(d) + if "pattern" in dict_copy: + new_dict["pattern"] = dict_copy.pop("pattern") + else: + raise ValidationException( + f"Missing pattern in secondaryFiles specification entry: {d}" + ) + new_dict["required"] = ( + dict_copy.pop("required") if "required" in dict_copy else None + ) + + if len(dict_copy): + raise ValidationException( + "Unallowed values in secondaryFiles specification entry: {}".format( + dict_copy + ) + ) + r.append(new_dict) + + else: + raise ValidationException( + "Expected a string or sequence of (strings or mappings)." 
+ ) + elif isinstance(doc, MutableMapping): + new_dict = {} + doc_copy = copy.deepcopy(doc) + if "pattern" in doc_copy: + new_dict["pattern"] = doc_copy.pop("pattern") + else: + raise ValidationException( + f"Missing pattern in secondaryFiles specification entry: {doc}" + ) + new_dict["required"] = doc_copy.pop("required") if "required" in doc_copy else None + + if len(doc_copy): + raise ValidationException( + f"Unallowed values in secondaryFiles specification entry: {doc_copy}" + ) + r.append(new_dict) + + elif isinstance(doc, str): + if doc.endswith("?"): + r.append({"pattern": doc[:-1], "required": False}) + else: + r.append({"pattern": doc}) + else: + raise ValidationException("Expected str or sequence of str") + return self.inner.load(r, baseuri, loadingOptions, docRoot, lc=lc) + + +class _RecordLoader(_Loader): + def __init__( + self, + classtype: type[Saveable], + container: Optional[str] = None, + no_link_check: Optional[bool] = None, + ) -> None: + self.classtype = classtype + self.container = container + self.no_link_check = no_link_check + + def load( + self, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + lc: Optional[list[Any]] = None, + ) -> Any: + if not isinstance(doc, MutableMapping): + raise ValidationException( + f"Value is a {convert_typing(extract_type(type(doc)))}, " + f"but valid type for this field is an object." 
+ ) + if self.container is not None or self.no_link_check is not None: + loadingOptions = LoadingOptions( + copyfrom=loadingOptions, container=self.container, no_link_check=self.no_link_check + ) + return self.classtype.fromDoc(doc, baseuri, loadingOptions, docRoot=docRoot) + + def __repr__(self) -> str: + return str(self.classtype.__name__) + + +class _ExpressionLoader(_Loader): + def __init__(self, items: type[str]) -> None: + self.items = items + + def load( + self, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + lc: Optional[list[Any]] = None, + ) -> Any: + if not isinstance(doc, str): + raise ValidationException( + f"Value is a {convert_typing(extract_type(type(doc)))}, " + f"but valid type for this field is a str." + ) + return doc + + +class _UnionLoader(_Loader): + def __init__(self, alternates: Sequence[_Loader], name: Optional[str] = None) -> None: + self.alternates = alternates + self.name = name + + def add_loaders(self, loaders: Sequence[_Loader]) -> None: + self.alternates = tuple(loader for loader in chain(self.alternates, loaders)) + + def load( + self, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + lc: Optional[list[Any]] = None, + ) -> Any: + errors = [] + + if lc is None: + lc = [] + + for t in self.alternates: + try: + return t.load(doc, baseuri, loadingOptions, docRoot=docRoot, lc=lc) + except ValidationException as e: + if isinstance(t, _ArrayLoader) and len(self.alternates) > 1: + continue + if isinstance(doc, (CommentedMap, dict)): + if "class" in doc: + if str(doc.get("class")) == str(t): + errors.append( + ValidationException( + f"Object `{baseuri.split('/')[-1]}` is not valid because:", + SourceLine(doc, next(iter(doc)), str), + [e], + ) + ) + else: + if "array" in str(t): + continue + else: + if "id" in doc: + id = baseuri.split("/")[-1] + "#" + str(doc.get("id")) + if "id" in lc: + errors.append( + ValidationException( + f"checking object 
`{id}` using `{t}`", + SourceLine(lc, "id", str), + [e], + ) + ) + else: + errors.append( + ValidationException( + f"checking object `{id}` using `{t}`", + SourceLine(lc, doc.get("id"), str), + [e], + ) + ) + else: + if not isinstance( + t, (_PrimitiveLoader) + ): # avoids 'tried was {x}' errors + errors.append( + ValidationException(f"tried `{t}` but", None, [e]) + ) + else: + # avoids "tried but x" and instead returns the values for parsing + errors.append(ValidationException("", None, [e])) + + if isinstance(doc, (CommentedMap, dict)) and "class" in doc: + if str(doc.get("class")) not in str(self.alternates): + errors.append( + ValidationException( + "Field `class` contains undefined reference to " + + "`" + + "/".join(baseuri.split("/")[0:-1]) + + "/" + + str(doc.get("class")) + + "`", + SourceLine(doc, "class", str), + [], + ) + ) + raise ValidationException("", None, errors, "*") + + def __repr__(self) -> str: + return self.name if self.name is not None else " | ".join(str(a) for a in self.alternates) + + +class _URILoader(_Loader): + def __init__( + self, + inner: _Loader, + scoped_id: bool, + vocab_term: bool, + scoped_ref: Optional[int], + no_link_check: Optional[bool], + ) -> None: + self.inner = inner + self.scoped_id = scoped_id + self.vocab_term = vocab_term + self.scoped_ref = scoped_ref + self.no_link_check = no_link_check + + def load( + self, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + lc: Optional[list[Any]] = None, + ) -> Any: + if self.no_link_check is not None: + loadingOptions = LoadingOptions( + copyfrom=loadingOptions, no_link_check=self.no_link_check + ) + if isinstance(doc, MutableSequence): + newdoc = [] + for i in doc: + if isinstance(i, str): + newdoc.append( + expand_url( + i, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + ) + else: + newdoc.append(i) + doc = newdoc + elif isinstance(doc, str): + doc = expand_url( + doc, + baseuri, + 
loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + if isinstance(doc, str): + if not loadingOptions.no_link_check: + errors = [] + try: + if not loadingOptions.fetcher.check_exists(doc): + errors.append( + ValidationException(f"contains undefined reference to `{doc}`") + ) + except ValidationException: + pass + if len(errors) > 0: + raise ValidationException("", None, errors) + return self.inner.load(doc, baseuri, loadingOptions, lc=lc) + + +class _TypeDSLLoader(_Loader): + def __init__(self, inner: _Loader, refScope: Optional[int], salad_version: str) -> None: + self.inner = inner + self.refScope = refScope + self.salad_version = salad_version + + def resolve( + self, + doc: str, + baseuri: str, + loadingOptions: LoadingOptions, + ) -> Union[list[Union[dict[str, Any], str]], dict[str, Any], str]: + doc_ = doc + optional = False + if doc_.endswith("?"): + optional = True + doc_ = doc_[0:-1] + + if doc_.endswith("[]"): + salad_versions = [int(v) for v in self.salad_version[1:].split(".")] + items: Union[list[Union[dict[str, Any], str]], dict[str, Any], str] = "" + rest = doc_[0:-2] + if salad_versions < [1, 3]: + if rest.endswith("[]"): + # To show the error message with the original type + return doc + else: + items = expand_url(rest, baseuri, loadingOptions, False, True, self.refScope) + else: + items = self.resolve(rest, baseuri, loadingOptions) + if isinstance(items, str): + items = expand_url(items, baseuri, loadingOptions, False, True, self.refScope) + expanded: Union[dict[str, Any], str] = {"type": "array", "items": items} + else: + expanded = expand_url(doc_, baseuri, loadingOptions, False, True, self.refScope) + + if optional: + return ["null", expanded] + else: + return expanded + + def load( + self, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + lc: Optional[list[Any]] = None, + ) -> Any: + if isinstance(doc, MutableSequence): + r: list[Any] = [] + for d in doc: + if 
isinstance(d, str): + resolved = self.resolve(d, baseuri, loadingOptions) + if isinstance(resolved, MutableSequence): + for i in resolved: + if i not in r: + r.append(i) + else: + if resolved not in r: + r.append(resolved) + else: + r.append(d) + doc = r + elif isinstance(doc, str): + doc = self.resolve(doc, baseuri, loadingOptions) + + return self.inner.load(doc, baseuri, loadingOptions, lc=lc) + + +class _IdMapLoader(_Loader): + def __init__(self, inner: _Loader, mapSubject: str, mapPredicate: Optional[str]) -> None: + self.inner = inner + self.mapSubject = mapSubject + self.mapPredicate = mapPredicate + + def load( + self, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + lc: Optional[list[Any]] = None, + ) -> Any: + if isinstance(doc, MutableMapping): + r: list[Any] = [] + for k in doc.keys(): + val = doc[k] + if isinstance(val, CommentedMap): + v = copy.copy(val) + v.lc.data = val.lc.data + v.lc.filename = val.lc.filename + v[self.mapSubject] = k + r.append(v) + elif isinstance(val, MutableMapping): + v2 = copy.copy(val) + v2[self.mapSubject] = k + r.append(v2) + else: + if self.mapPredicate: + v3 = {self.mapPredicate: val} + v3[self.mapSubject] = k + r.append(v3) + else: + raise ValidationException("No mapPredicate") + doc = r + return self.inner.load(doc, baseuri, loadingOptions, lc=lc) + + +def _document_load( + loader: _Loader, + doc: Union[str, MutableMapping[str, Any], MutableSequence[Any]], + baseuri: str, + loadingOptions: LoadingOptions, + addl_metadata_fields: Optional[MutableSequence[str]] = None, +) -> tuple[Any, LoadingOptions]: + if isinstance(doc, str): + return _document_load_by_url( + loader, + loadingOptions.fetcher.urljoin(baseuri, doc), + loadingOptions, + addl_metadata_fields=addl_metadata_fields, + ) + + if isinstance(doc, MutableMapping): + addl_metadata = {} + if addl_metadata_fields is not None: + for mf in addl_metadata_fields: + if mf in doc: + addl_metadata[mf] = doc[mf] + + docuri = 
baseuri + if "$base" in doc: + baseuri = doc["$base"] + + loadingOptions = LoadingOptions( + copyfrom=loadingOptions, + namespaces=doc.get("$namespaces", None), + schemas=doc.get("$schemas", None), + baseuri=doc.get("$base", None), + addl_metadata=addl_metadata, + ) + + doc = copy.copy(doc) + if "$namespaces" in doc: + doc.pop("$namespaces") + if "$schemas" in doc: + doc.pop("$schemas") + if "$base" in doc: + doc.pop("$base") + + if "$graph" in doc: + loadingOptions.idx[baseuri] = ( + loader.load(doc["$graph"], baseuri, loadingOptions), + loadingOptions, + ) + else: + loadingOptions.idx[baseuri] = ( + loader.load(doc, baseuri, loadingOptions, docRoot=baseuri), + loadingOptions, + ) + + if docuri != baseuri: + loadingOptions.idx[docuri] = loadingOptions.idx[baseuri] + + return loadingOptions.idx[baseuri] + + if isinstance(doc, MutableSequence): + loadingOptions.idx[baseuri] = ( + loader.load(doc, baseuri, loadingOptions), + loadingOptions, + ) + return loadingOptions.idx[baseuri] + + raise ValidationException( + "Expected URI string, MutableMapping or MutableSequence, got %s" % type(doc) + ) + + +def _document_load_by_url( + loader: _Loader, + url: str, + loadingOptions: LoadingOptions, + addl_metadata_fields: Optional[MutableSequence[str]] = None, +) -> tuple[Any, LoadingOptions]: + if url in loadingOptions.idx: + return loadingOptions.idx[url] + + doc_url, frg = urldefrag(url) + + text = loadingOptions.fetcher.fetch_text(doc_url) + textIO = StringIO(text) + textIO.name = str(doc_url) + yaml = yaml_no_ts() + result = yaml.load(textIO) + add_lc_filename(result, doc_url) + + loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=doc_url) + + _document_load( + loader, + result, + doc_url, + loadingOptions, + addl_metadata_fields=addl_metadata_fields, + ) + + return loadingOptions.idx[url] + + +def file_uri(path: str, split_frag: bool = False) -> str: + """Transform a file path into a URL with file scheme.""" + if path.startswith("file://"): + return path + 
if split_frag:
+        # "a#b" splits into the path part and a percent-quoted fragment.
+        pathsp = path.split("#", 2)
+        frag = "#" + quote(str(pathsp[1])) if len(pathsp) == 2 else ""
+        urlpath = pathname2url(str(pathsp[0]))
+    else:
+        urlpath = pathname2url(path)
+        frag = ""
+    if urlpath.startswith("//"):
+        # NOTE(review): presumably a UNC-style "//server/share" result from
+        # pathname2url; prefixing "file://" would double the slashes — confirm.
+        return f"file:{urlpath}{frag}"
+    return f"file://{urlpath}{frag}"
+
+
+def prefix_url(url: str, namespaces: dict[str, str]) -> str:
+    """Expand short forms into full URLs using the given namespace dictionary."""
+    # First matching namespace value wins; the URL is rewritten as "prefix:rest".
+    for k, v in namespaces.items():
+        if url.startswith(v):
+            return k + ":" + url[len(v) :]
+    return url
+
+
+def save_relative_uri(
+    uri: Any,
+    base_url: str,
+    scoped_id: bool,
+    ref_scope: Optional[int],
+    relative_uris: bool,
+) -> Any:
+    """Convert any URI to a relative one, obeying the scoping rules."""
+    if isinstance(uri, MutableSequence):
+        # Relativize each element of a list of URIs independently.
+        return [save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) for u in uri]
+    elif isinstance(uri, str):
+        if not relative_uris or uri == base_url:
+            return uri
+        urisplit = urlsplit(uri)
+        basesplit = urlsplit(base_url)
+        # Only URIs sharing scheme and authority with the base can be relativized.
+        if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc:
+            if urisplit.path != basesplit.path:
+                # Different paths: emit a filesystem-style relative path,
+                # re-attaching the fragment if one was present.
+                p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path))
+                if urisplit.fragment:
+                    p = p + "#" + urisplit.fragment
+                return p
+
+            basefrag = basesplit.fragment + "/"
+            if ref_scope:
+                # Trim ref_scope trailing components off the base fragment
+                # before comparing scopes.
+                sp = basefrag.split("/")
+                i = 0
+                while i < ref_scope:
+                    sp.pop()
+                    i += 1
+                basefrag = "/".join(sp)
+
+            # Same document: strip the shared fragment prefix when possible.
+            if urisplit.fragment.startswith(basefrag):
+                return urisplit.fragment[len(basefrag) :]
+            return urisplit.fragment
+        return uri
+    else:
+        # Non-string values (nested saveable objects) are serialized recursively.
+        return save(uri, top=False, base_url=base_url, relative_uris=relative_uris)
+
+
+def shortname(inputid: str) -> str:
+    """
+    Compute the shortname of a fully qualified identifier.
+
+    See https://w3id.org/cwl/v1.2/SchemaSalad.html#Short_names.
+ """ + parsed_id = urlparse(inputid) + if parsed_id.fragment: + return parsed_id.fragment.split("/")[-1] + return parsed_id.path.split("/")[-1] + + +def parser_info() -> str: + return "org.w3id.cwl.v1_3" + + +class Documented(Saveable): + pass + + +class RecordField(Documented): + """ + A field of a record. + """ + + name: str + + def __init__( + self, + name: Any, + type_: Any, + doc: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.type_ = type_ + + def __eq__(self, other: Any) -> bool: + if isinstance(other, RecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type_ == other.type_ + ) + return False + + def __hash__(self) -> int: + return hash((self.doc, self.name, self.type_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "RecordField": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` 
field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + _errors__.append(ValidationException("missing name")) + if not __original_name_is_none: + baseuri = cast(str, name) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise 
ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = 
cls( + doc=doc, + name=name, + type_=type_, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["doc", "name", "type"]) + + +class RecordSchema(Saveable): + def __init__( + self, + type_: Any, + fields: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type_ = type_ + + def __eq__(self, other: Any) -> bool: + if isinstance(other, RecordSchema): + return bool(self.fields == other.fields and self.type_ == other.type_) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + 
loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "RecordSchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + fields = None + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, + baseuri, + loadingOptions, + lc=_doc.get("fields") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `fields`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("fields") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + detailed_message=f"the `fields` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + 
ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + fields=fields, + type_=type_, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.fields is not None: + r["fields"] = save( + self.fields, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + 
r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type"]) + + +class EnumSchema(Saveable): + """ + Define an enumerated type. + + """ + + name: str + + def __init__( + self, + symbols: Any, + type_: Any, + name: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.symbols = symbols + self.type_ = type_ + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type_ == other.type_ + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.symbols, self.type_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "EnumSchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not 
valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + try: + if _doc.get("symbols") is None: + raise ValidationException("missing required field `symbols`", None, []) + + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("symbols") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `symbols`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("symbols") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + detailed_message=f"the `symbols` field with value `{val}` " + "is not valid because:", + 
) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + name=name, + symbols=symbols, + type_=type_, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def 
save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.symbols is not None: + u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) + r["symbols"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type"]) + + +class ArraySchema(Saveable): + def __init__( + self, + items: Any, + type_: Any, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type_ = type_ + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ArraySchema): + return bool(self.items == other.items and self.type_ == other.type_) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "ArraySchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if 
_doc.get("items") is None: + raise ValidationException("missing required field `items`", None, []) + + items = load_field( + _doc.get("items"), + uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("items") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `items`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("items") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + detailed_message=f"the `items` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != 
str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + items=items, + type_=type_, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.items is not None: + u = save_relative_uri(self.items, base_url, False, 2, relative_uris) + r["items"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory 
level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type"]) + + +class MapSchema(Saveable): + def __init__( + self, + type_: Any, + values: Any, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.type_ = type_ + self.values = values + + def __eq__(self, other: Any) -> bool: + if isinstance(other, MapSchema): + return bool(self.type_ == other.type_ and self.values == other.values) + return False + + def __hash__(self) -> int: + return hash((self.type_, self.values)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "MapSchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Map_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for 
this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("values") is None: + raise ValidationException("missing required field `values`", None, []) + + values = load_field( + _doc.get("values"), + uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("values") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `values`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("values") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `values` field is not valid because:", + SourceLine(_doc, "values", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `values` field is not valid because:", + SourceLine(_doc, "values", str), + [e], + detailed_message=f"the `values` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if 
k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `type`, `values`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + type_=type_, + values=values, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.values is not None: + u = save_relative_uri(self.values, base_url, False, 2, relative_uris) + r["values"] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["type", "values"]) + + +class UnionSchema(Saveable): + def __init__( + self, + names: Any, + type_: Any, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.names = names + self.type_ = type_ + + def __eq__(self, other: Any) -> 
bool: + if isinstance(other, UnionSchema): + return bool(self.names == other.names and self.type_ == other.type_) + return False + + def __hash__(self) -> int: + return hash((self.names, self.type_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "UnionSchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("names") is None: + raise ValidationException("missing required field `names`", None, []) + + names = load_field( + _doc.get("names"), + uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("names") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `names`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("names") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `names` field is not valid because:", + SourceLine(_doc, "names", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `names` field is not valid because:", + SourceLine(_doc, "names", str), + [e], + detailed_message=f"the `names` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if 
_doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Union_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `names`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + names=names, + type_=type_, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: 
dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.names is not None: + u = save_relative_uri(self.names, base_url, False, 2, relative_uris) + r["names"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["names", "type"]) + + +class CWLArraySchema(ArraySchema): + def __init__( + self, + items: Any, + type_: Any, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type_ = type_ + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CWLArraySchema): + return bool(self.items == other.items and self.type_ == other.type_) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "CWLArraySchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("items") is None: + raise ValidationException("missing required field `items`", None, []) + + items = load_field( + _doc.get("items"), + 
uri_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("items") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `items`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("items") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + detailed_message=f"the `items` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a 
{val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + items=items, + type_=type_, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.items is not None: + u = save_relative_uri(self.items, base_url, False, 2, relative_uris) + r["items"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = 
frozenset(["items", "type"]) + + +class CWLRecordField(RecordField): + name: str + + def __init__( + self, + name: Any, + type_: Any, + doc: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.type_ = type_ + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CWLRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type_ == other.type_ + ) + return False + + def __hash__(self) -> int: + return hash((self.doc, self.name, self.type_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "CWLRecordField": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " 
+ f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + _errors__.append(ValidationException("missing name")) + if not __original_name_is_none: + baseuri = cast(str, name) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + 
typedsl_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + doc=doc, + name=name, + type_=type_, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, 
loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["doc", "name", "type"]) + + +class CWLRecordSchema(RecordSchema): + def __init__( + self, + type_: Any, + fields: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type_ = type_ + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CWLRecordSchema): + return bool(self.fields == other.fields and self.type_ == other.type_) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "CWLRecordSchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = 
doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + fields = None + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CWLRecordFieldLoader, + baseuri, + loadingOptions, + lc=_doc.get("fields") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `fields`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("fields") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + detailed_message=f"the `fields` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + 
f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + fields=fields, + type_=type_, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.fields is not None: + r["fields"] = save( + self.fields, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = 
frozenset(["fields", "type"]) + + +class File(Saveable): + """ + Represents a file (or group of files when `secondaryFiles` is provided) that + will be accessible by tools using standard POSIX file system call API such as + open(2) and read(2). + + Files are represented as objects with `class` of `File`. File objects have + a number of properties that provide metadata about the file. + + The `location` property of a File is a IRI that uniquely identifies the + file. Implementations must support the `file://` IRI scheme and may support + other schemes such as `http://` and `https://`. The value of `location` may also be a + relative reference, in which case it must be resolved relative to the IRI + of the document it appears in. Alternately to `location`, implementations + must also accept the `path` property on File, which must be a filesystem + path available on the same host as the CWL runner (for inputs) or the + runtime environment of a command line tool execution (for command line tool + outputs). + + If no `location` or `path` is specified, a file object must specify + `contents` with the UTF-8 text content of the file. This is a "file + literal". File literals do not correspond to external resources, but are + created on disk with `contents` with when needed for executing a tool. + Where appropriate, expressions can return file literals to define new files + on a runtime. The maximum size of `contents` is 64 kilobytes. + + The `basename` property defines the filename on disk where the file is + staged. This may differ from the resource name. If not provided, + `basename` must be computed from the last path part of `location` and made + available to expressions. + + The `secondaryFiles` property is a list of File or Directory objects that + must be staged in the same directory as the primary file. It is an error + for file names to be duplicated in `secondaryFiles`. + + The `size` property is the size in bytes of the File. 
It must be computed + from the resource and made available to expressions. The `checksum` field + contains a cryptographic hash of the file content for use it verifying file + contents. Implementations may, at user option, enable or disable + computation of the `checksum` field for performance or other reasons. + However, the ability to compute output checksums is required to pass the + CWL conformance test suite. + + When executing a CommandLineTool, the files and secondary files may be + staged to an arbitrary directory, but must use the value of `basename` for + the filename. The `path` property must be file path in the context of the + tool execution runtime (local to the compute node, or within the executing + container). All computed properties should be available to expressions. + File literals also must be staged and `path` must be set. + + When collecting CommandLineTool outputs, `glob` matching returns file paths + (with the `path` property) and the derived properties. This can all be + modified by `outputEval`. Alternately, if the file `cwl.output.json` is + present in the output, `outputBinding` is ignored. + + File objects in the output must provide either a `location` IRI or a `path` + property in the context of the tool execution runtime (local to the compute + node, or within the executing container). + + When evaluating an ExpressionTool, file objects must be referenced via + `location` (the expression tool does not have access to files on disk so + `path` is meaningless) or as file literals. It is legal to return a file + object with an existing `location` but a different `basename`. The + `loadContents` field of ExpressionTool inputs behaves the same as on + CommandLineTool inputs, however it is not meaningful on the outputs. + + An ExpressionTool may forward file references from input to output by using + the same value for `location`. 
+ + """ + + def __init__( + self, + location: Optional[Any] = None, + path: Optional[Any] = None, + basename: Optional[Any] = None, + dirname: Optional[Any] = None, + nameroot: Optional[Any] = None, + nameext: Optional[Any] = None, + checksum: Optional[Any] = None, + size: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + format: Optional[Any] = None, + contents: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "File" + self.location = location + self.path = path + self.basename = basename + self.dirname = dirname + self.nameroot = nameroot + self.nameext = nameext + self.checksum = checksum + self.size = size + self.secondaryFiles = secondaryFiles + self.format = format + self.contents = contents + + def __eq__(self, other: Any) -> bool: + if isinstance(other, File): + return bool( + self.class_ == other.class_ + and self.location == other.location + and self.path == other.path + and self.basename == other.basename + and self.dirname == other.dirname + and self.nameroot == other.nameroot + and self.nameext == other.nameext + and self.checksum == other.checksum + and self.size == other.size + and self.secondaryFiles == other.secondaryFiles + and self.format == other.format + and self.contents == other.contents + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.class_, + self.location, + self.path, + self.basename, + self.dirname, + self.nameroot, + self.nameext, + self.checksum, + self.size, + self.secondaryFiles, + self.format, + self.contents, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "File": + 
_doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + uri_File_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + location = None + if "location" in _doc: + try: + location = load_field( + _doc.get("location"), + uri_union_of_None_type_or_strtype_False_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("location") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `location`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("location") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `location` field is not valid because:", + SourceLine(_doc, "location", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `location` field is not valid because:", + SourceLine(_doc, "location", str), + [e], + detailed_message=f"the `location` field with value `{val}` " + "is not valid because:", + ) + ) + path = None + if "path" in _doc: + try: + path = load_field( + _doc.get("path"), + uri_union_of_None_type_or_strtype_False_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("path") + ) + + except ValidationException 
as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `path`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("path") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [e], + detailed_message=f"the `path` field with value `{val}` " + "is not valid because:", + ) + ) + basename = None + if "basename" in _doc: + try: + basename = load_field( + _doc.get("basename"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("basename") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `basename`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("basename") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [e], 
+ detailed_message=f"the `basename` field with value `{val}` " + "is not valid because:", + ) + ) + dirname = None + if "dirname" in _doc: + try: + dirname = load_field( + _doc.get("dirname"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("dirname") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `dirname`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("dirname") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `dirname` field is not valid because:", + SourceLine(_doc, "dirname", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `dirname` field is not valid because:", + SourceLine(_doc, "dirname", str), + [e], + detailed_message=f"the `dirname` field with value `{val}` " + "is not valid because:", + ) + ) + nameroot = None + if "nameroot" in _doc: + try: + nameroot = load_field( + _doc.get("nameroot"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("nameroot") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `nameroot`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("nameroot") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `nameroot` field is not valid because:", + SourceLine(_doc, "nameroot", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for 
this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `nameroot` field is not valid because:", + SourceLine(_doc, "nameroot", str), + [e], + detailed_message=f"the `nameroot` field with value `{val}` " + "is not valid because:", + ) + ) + nameext = None + if "nameext" in _doc: + try: + nameext = load_field( + _doc.get("nameext"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("nameext") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `nameext`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("nameext") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `nameext` field is not valid because:", + SourceLine(_doc, "nameext", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `nameext` field is not valid because:", + SourceLine(_doc, "nameext", str), + [e], + detailed_message=f"the `nameext` field with value `{val}` " + "is not valid because:", + ) + ) + checksum = None + if "checksum" in _doc: + try: + checksum = load_field( + _doc.get("checksum"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("checksum") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `checksum`": + _errors__.append( + ValidationException( + str(e), + 
None + ) + ) + else: + val = _doc.get("checksum") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `checksum` field is not valid because:", + SourceLine(_doc, "checksum", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `checksum` field is not valid because:", + SourceLine(_doc, "checksum", str), + [e], + detailed_message=f"the `checksum` field with value `{val}` " + "is not valid because:", + ) + ) + size = None + if "size" in _doc: + try: + size = load_field( + _doc.get("size"), + union_of_None_type_or_inttype, + baseuri, + loadingOptions, + lc=_doc.get("size") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `size`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("size") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `size` field is not valid because:", + SourceLine(_doc, "size", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `size` field is not valid because:", + SourceLine(_doc, "size", str), + [e], + detailed_message=f"the `size` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + 
_doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} 
{error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + contents = None + if "contents" in _doc: + try: + contents = load_field( + _doc.get("contents"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("contents") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `contents`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("contents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `contents` field is not valid because:", + SourceLine(_doc, "contents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `contents` field is not valid because:", + SourceLine(_doc, "contents", str), + [e], + detailed_message=f"the `contents` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `location`, 
`path`, `basename`, `dirname`, `nameroot`, `nameext`, `checksum`, `size`, `secondaryFiles`, `format`, `contents`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + location=location, + path=path, + basename=basename, + dirname=dirname, + nameroot=nameroot, + nameext=nameext, + checksum=checksum, + size=size, + secondaryFiles=secondaryFiles, + format=format, + contents=contents, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.location is not None: + u = save_relative_uri(self.location, base_url, False, None, relative_uris) + r["location"] = u + if self.path is not None: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + if self.basename is not None: + r["basename"] = save( + self.basename, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.dirname is not None: + r["dirname"] = save( + self.dirname, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.nameroot is not None: + r["nameroot"] = save( + self.nameroot, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.nameext is not None: + r["nameext"] = save( + self.nameext, top=False, base_url=base_url, relative_uris=relative_uris + ) + if 
self.checksum is not None: + r["checksum"] = save( + self.checksum, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.size is not None: + r["size"] = save( + self.size, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.format is not None: + u = save_relative_uri(self.format, base_url, True, None, relative_uris) + r["format"] = u + if self.contents is not None: + r["contents"] = save( + self.contents, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "class", + "location", + "path", + "basename", + "dirname", + "nameroot", + "nameext", + "checksum", + "size", + "secondaryFiles", + "format", + "contents", + ] + ) + + +class Directory(Saveable): + """ + Represents a directory to present to a command line tool. + + Directories are represented as objects with `class` of `Directory`. Directory objects have + a number of properties that provide metadata about the directory. + + The `location` property of a Directory is a IRI that uniquely identifies + the directory. Implementations must support the file:// IRI scheme and may + support other schemes such as http://. Alternately to `location`, + implementations must also accept the `path` property on Directory, which + must be a filesystem path available on the same host as the CWL runner (for + inputs) or the runtime environment of a command line tool execution (for + command line tool outputs). + + A Directory object may have a `listing` field. This is a list of File and + Directory objects that are contained in the Directory. 
For each entry in + `listing`, the `basename` property defines the name of the File or + Subdirectory when staged to disk. If `listing` is not provided, the + implementation must have some way of fetching the Directory listing at + runtime based on the `location` field. + + If a Directory does not have `location`, it is a Directory literal. A + Directory literal must provide `listing`. Directory literals must be + created on disk at runtime as needed. + + The resources in a Directory literal do not need to have any implied + relationship in their `location`. For example, a Directory listing may + contain two files located on different hosts. It is the responsibility of + the runtime to ensure that those files are staged to disk appropriately. + Secondary files associated with files in `listing` must also be staged to + the same Directory. + + When executing a CommandLineTool, Directories must be recursively staged + first and have local values of `path` assigned. + + Directory objects in CommandLineTool output must provide either a + `location` IRI or a `path` property in the context of the tool execution + runtime (local to the compute node, or within the executing container). + + An ExpressionTool may forward file references from input to output by using + the same value for `location`. + + Name conflicts (the same `basename` appearing multiple times in `listing` + or in any entry in `secondaryFiles` in the listing) is a fatal error. 
+ + """ + + def __init__( + self, + location: Optional[Any] = None, + path: Optional[Any] = None, + basename: Optional[Any] = None, + listing: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "Directory" + self.location = location + self.path = path + self.basename = basename + self.listing = listing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Directory): + return bool( + self.class_ == other.class_ + and self.location == other.location + and self.path == other.path + and self.basename == other.basename + and self.listing == other.listing + ) + return False + + def __hash__(self) -> int: + return hash( + (self.class_, self.location, self.path, self.basename, self.listing) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "Directory": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + uri_Directory_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + location = None + if "location" in _doc: + try: + location = load_field( + _doc.get("location"), + uri_union_of_None_type_or_strtype_False_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("location") + ) + + except 
ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `location`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("location") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `location` field is not valid because:", + SourceLine(_doc, "location", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `location` field is not valid because:", + SourceLine(_doc, "location", str), + [e], + detailed_message=f"the `location` field with value `{val}` " + "is not valid because:", + ) + ) + path = None + if "path" in _doc: + try: + path = load_field( + _doc.get("path"), + uri_union_of_None_type_or_strtype_False_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("path") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `path`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("path") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + 
SourceLine(_doc, "path", str), + [e], + detailed_message=f"the `path` field with value `{val}` " + "is not valid because:", + ) + ) + basename = None + if "basename" in _doc: + try: + basename = load_field( + _doc.get("basename"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("basename") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `basename`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("basename") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [e], + detailed_message=f"the `basename` field with value `{val}` " + "is not valid because:", + ) + ) + listing = None + if "listing" in _doc: + try: + listing = load_field( + _doc.get("listing"), + union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + lc=_doc.get("listing") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `listing`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("listing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + 
[ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [e], + detailed_message=f"the `listing` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `listing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + location=location, + path=path, + basename=basename, + listing=listing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.location is not None: + u = save_relative_uri(self.location, base_url, 
False, None, relative_uris) + r["location"] = u + if self.path is not None: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + if self.basename is not None: + r["basename"] = save( + self.basename, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.listing is not None: + r["listing"] = save( + self.listing, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "location", "path", "basename", "listing"]) + + +class Labeled(Saveable): + pass + + +class Identified(Saveable): + pass + + +class LoadContents(Saveable): + pass + + +class FieldBase(Labeled): + pass + + +class InputFormat(Saveable): + pass + + +class OutputFormat(Saveable): + pass + + +class Parameter(FieldBase, Documented, Identified): + """ + Define an input or output parameter to a process. 
+ + """ + + pass + + +class InputBinding(Saveable): + def __init__( + self, + loadContents: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.loadContents = loadContents + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputBinding): + return bool(self.loadContents == other.loadContents) + return False + + def __hash__(self) -> int: + return hash((self.loadContents)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "InputBinding": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + loadContents = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("loadContents") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadContents`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadContents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + 
ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + detailed_message=f"the `loadContents` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `loadContents`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + loadContents=loadContents, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["loadContents"]) + + +class IOSchema(Labeled, Documented): + pass + + +class InputSchema(IOSchema): + pass + + +class OutputSchema(IOSchema): + pass + + +class InputRecordField(CWLRecordField, FieldBase, InputFormat, LoadContents): + name: str + + def __init__( + self, + name: Any, + type_: Any, + doc: Optional[Any] = None, + 
label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.type_ = type_ + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type_ == other.type_ + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.doc, + self.name, + self.type_, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + self.loadContents, + self.loadListing, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "InputRecordField": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except 
ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + _errors__.append(ValidationException("missing name")) + if not __original_name_is_none: + baseuri = cast(str, name) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + 
f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = 
parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + 
"the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + streamable = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("streamable") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `streamable`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("streamable") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + detailed_message=f"the `streamable` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + 
_errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + loadContents = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("loadContents") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadContents`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadContents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + detailed_message=f"the `loadContents` field with value `{val}` " + "is not valid because:", + ) + ) + loadListing = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + 
union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + lc=_doc.get("loadListing") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadListing`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadListing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + detailed_message=f"the `loadListing` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + doc=doc, + name=name, + type_=type_, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + loadContents=loadContents, + loadListing=loadListing, + extension_fields=extension_fields, + 
loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.format is not None: + u = save_relative_uri(self.format, self.name, True, None, relative_uris) + r["format"] = u + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "doc", + "name", + "type", + 
"label", + "secondaryFiles", + "streamable", + "format", + "loadContents", + "loadListing", + ] + ) + + +class InputRecordSchema(CWLRecordSchema, InputSchema): + name: str + + def __init__( + self, + type_: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type_ = type_ + self.label = label + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputRecordSchema): + return bool( + self.fields == other.fields + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type_, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "InputRecordSchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = 
convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + fields = None + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader, + baseuri, + loadingOptions, + lc=_doc.get("fields") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `fields`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("fields") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + detailed_message=f"the `fields` 
field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + 
f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( + 
k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + fields=fields, + type_=type_, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.fields is not None: + r["fields"] = save( + self.fields, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name"]) + + +class InputEnumSchema(EnumSchema, InputSchema): + name: str + + def __init__( + self, + symbols: Any, + type_: Any, + name: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + 
self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.symbols = symbols + self.type_ = type_ + self.label = label + self.doc = doc + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputEnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.symbols, self.type_, self.label, self.doc)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "InputEnumSchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + 
"the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + try: + if _doc.get("symbols") is None: + raise ValidationException("missing required field `symbols`", None, []) + + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("symbols") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `symbols`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("symbols") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + detailed_message=f"the `symbols` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing 
required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: 
+ try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + name=name, + symbols=symbols, + type_=type_, + label=label, + doc=doc, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", 
relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.symbols is not None: + u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) + r["symbols"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type", "label", "doc"]) + + +class InputArraySchema(CWLArraySchema, InputSchema): + name: str + + def __init__( + self, + items: Any, + type_: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type_ = type_ + self.label = label + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputArraySchema): + return 
bool( + self.items == other.items + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type_, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "InputArraySchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + try: + if _doc.get("items") is None: + raise 
ValidationException("missing required field `items`", None, []) + + items = load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("items") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `items`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("items") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + detailed_message=f"the `items` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + 
ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage 
= parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + items=items, + type_=type_, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in 
self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.items is not None: + u = save_relative_uri(self.items, self.name, False, 2, relative_uris) + r["items"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name"]) + + +class OutputRecordField(CWLRecordField, FieldBase, OutputFormat): + name: str + + def __init__( + self, + name: Any, + type_: Any, + doc: Optional[Any] = None, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + format: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.type_ = type_ + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and 
self.type_ == other.type_ + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.doc, + self.name, + self.type_, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "OutputRecordField": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + _errors__.append(ValidationException("missing name")) + if not __original_name_is_none: + 
baseuri = cast(str, name) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( 
+ "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + 
lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + streamable = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("streamable") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `streamable`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("streamable") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} 
{error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + detailed_message=f"the `streamable` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`".format( + k + ), + SourceLine(_doc, k, str), + ) + 
) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + doc=doc, + name=name, + type_=type_, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.format is not None: + u = save_relative_uri(self.format, self.name, True, None, relative_uris) + r["format"] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + ["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"] + ) + + 
+class OutputRecordSchema(CWLRecordSchema, OutputSchema): + name: str + + def __init__( + self, + type_: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type_ = type_ + self.label = label + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputRecordSchema): + return bool( + self.fields == other.fields + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type_, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "OutputRecordSchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not 
valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + fields = None + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader, + baseuri, + loadingOptions, + lc=_doc.get("fields") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `fields`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("fields") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + detailed_message=f"the `fields` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise 
ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this 
field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + 
_constructed = cls( + fields=fields, + type_=type_, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.fields is not None: + r["fields"] = save( + self.fields, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name"]) + + +class OutputEnumSchema(EnumSchema, OutputSchema): + name: str + + def __init__( + self, + symbols: Any, + type_: Any, + name: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + 
self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.symbols = symbols + self.type_ = type_ + self.label = label + self.doc = doc + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputEnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.symbols, self.type_, self.label, self.doc)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "OutputEnumSchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the 
`name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + try: + if _doc.get("symbols") is None: + raise ValidationException("missing required field `symbols`", None, []) + + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("symbols") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `symbols`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("symbols") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + detailed_message=f"the `symbols` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = 
_doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + 
baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + name=name, + symbols=symbols, + type_=type_, + label=label, + doc=doc, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for 
ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.symbols is not None: + u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) + r["symbols"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type", "label", "doc"]) + + +class OutputArraySchema(CWLArraySchema, OutputSchema): + name: str + + def __init__( + self, + items: Any, + type_: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type_ = type_ + self.label = label + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputArraySchema): + return bool( + self.items == other.items + and self.type_ == other.type_ + and self.label == other.label + 
and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type_, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "OutputArraySchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + try: + if _doc.get("items") is None: + raise ValidationException("missing required field `items`", None, []) + + items = load_field( + _doc.get("items"), + 
uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("items") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `items`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("items") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + detailed_message=f"the `items` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + 
[ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + 
ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + items=items, + type_=type_, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = 
save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.items is not None: + u = save_relative_uri(self.items, self.name, False, 2, relative_uris) + r["items"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name"]) + + +class InputParameter(Parameter, InputFormat, LoadContents): + pass + + +class OutputParameter(Parameter, OutputFormat): + pass + + +class ProcessRequirement(Saveable): + """ + A process requirement declares a prerequisite that may or must be fulfilled + before executing a process. See [`Process.hints`](#process) and + [`Process.requirements`](#process). + + Process requirements are the primary mechanism for specifying extensions to + the CWL core specification. + + """ + + pass + + +class Process(Identified, Labeled, Documented): + """ + + The base executable type in CWL is the `Process` object defined by the + document. Note that the `Process` object is abstract and cannot be + directly executed. + + """ + + pass + + +class InlineJavascriptRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support inline Javascript expressions. + If this requirement is not present, the workflow platform must not perform expression + interpolation. 
+ + """ + + def __init__( + self, + expressionLib: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "InlineJavascriptRequirement" + self.expressionLib = expressionLib + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InlineJavascriptRequirement): + return bool( + self.class_ == other.class_ + and self.expressionLib == other.expressionLib + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.expressionLib)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "InlineJavascriptRequirement": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + uri_InlineJavascriptRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + expressionLib = None + if "expressionLib" in _doc: + try: + expressionLib = load_field( + _doc.get("expressionLib"), + union_of_None_type_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("expressionLib") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `expressionLib`": + _errors__.append( + ValidationException( + str(e), + None + ) 
+ ) + else: + val = _doc.get("expressionLib") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `expressionLib` field is not valid because:", + SourceLine(_doc, "expressionLib", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `expressionLib` field is not valid because:", + SourceLine(_doc, "expressionLib", str), + [e], + detailed_message=f"the `expressionLib` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `expressionLib`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + expressionLib=expressionLib, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = 
f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.expressionLib is not None: + r["expressionLib"] = save( + self.expressionLib, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "expressionLib"]) + + +class CommandInputSchema(Saveable): + pass + + +class SchemaDefRequirement(ProcessRequirement): + """ + This field consists of an array of type definitions which must be used when + interpreting the `inputs` and `outputs` fields. When a `type` field + contains a IRI, the implementation must check if the type is defined in + `schemaDefs` and use that definition. If the type is not found in + `schemaDefs`, it is an error. The entries in `schemaDefs` must be + processed in the order listed such that later schema definitions may refer + to earlier schema definitions. + + - **Type definitions are allowed for `enum` and `record` types only.** + - Type definitions may be shared by defining them in a file and then + `$include`-ing them in the `types` field. 
+ - A file can contain a list of type definitions + + """ + + def __init__( + self, + types: Any, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SchemaDefRequirement" + self.types = types + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SchemaDefRequirement): + return bool(self.class_ == other.class_ and self.types == other.types) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.types)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "SchemaDefRequirement": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + uri_SchemaDefRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + try: + if _doc.get("types") is None: + raise ValidationException("missing required field `types`", None, []) + + types = load_field( + _doc.get("types"), + array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("types") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `types`": + _errors__.append( 
+ ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("types") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `types` field is not valid because:", + SourceLine(_doc, "types", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `types` field is not valid because:", + SourceLine(_doc, "types", str), + [e], + detailed_message=f"the `types` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `types`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + types=types, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: 
+ uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.types is not None: + r["types"] = save( + self.types, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "types"]) + + +class SecondaryFileSchema(Saveable): + """ + Secondary files are specified using the following micro-DSL for secondary files: + + * If the value is a string, it is transformed to an object with two fields + `pattern` and `required` + * By default, the value of `required` is `null` + (this indicates default behavior, which may be based on the context) + * If the value ends with a question mark `?` the question mark is + stripped off and the value of the field `required` is set to `False` + * The remaining value is assigned to the field `pattern` + + For implementation details and examples, please see + [this section](SchemaSalad.html#Domain_Specific_Language_for_secondary_files) + in the Schema Salad specification. 
+ + """ + + def __init__( + self, + pattern: Any, + required: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.pattern = pattern + self.required = required + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SecondaryFileSchema): + return bool( + self.pattern == other.pattern and self.required == other.required + ) + return False + + def __hash__(self) -> int: + return hash((self.pattern, self.required)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "SecondaryFileSchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("pattern") is None: + raise ValidationException("missing required field `pattern`", None, []) + + pattern = load_field( + _doc.get("pattern"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("pattern") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `pattern`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("pattern") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `pattern` field is not valid because:", + SourceLine(_doc, "pattern", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + 
f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `pattern` field is not valid because:", + SourceLine(_doc, "pattern", str), + [e], + detailed_message=f"the `pattern` field with value `{val}` " + "is not valid because:", + ) + ) + required = None + if "required" in _doc: + try: + required = load_field( + _doc.get("required"), + union_of_None_type_or_booltype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("required") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `required`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("required") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `required` field is not valid because:", + SourceLine(_doc, "required", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `required` field is not valid because:", + SourceLine(_doc, "required", str), + [e], + detailed_message=f"the `required` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `pattern`, `required`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise 
ValidationException("", None, _errors__, "*") + _constructed = cls( + pattern=pattern, + required=required, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.pattern is not None: + r["pattern"] = save( + self.pattern, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.required is not None: + r["required"] = save( + self.required, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["pattern", "required"]) + + +class LoadListingRequirement(ProcessRequirement): + """ + Specify the desired behavior for loading the `listing` field of + a Directory object for use by expressions. 
+ + """ + + def __init__( + self, + loadListing: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "LoadListingRequirement" + self.loadListing = loadListing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, LoadListingRequirement): + return bool( + self.class_ == other.class_ and self.loadListing == other.loadListing + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.loadListing)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "LoadListingRequirement": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + uri_LoadListingRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + loadListing = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + lc=_doc.get("loadListing") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadListing`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = 
_doc.get("loadListing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + detailed_message=f"the `loadListing` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `loadListing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + loadListing=loadListing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = 
self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "loadListing"]) + + +class EnvironmentDef(Saveable): + """ + Define an environment variable that will be set in the runtime environment + by the workflow platform when executing the command line tool. May be the + result of executing an expression, such as getting a parameter from input. + + """ + + def __init__( + self, + envName: Any, + envValue: Any, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.envName = envName + self.envValue = envValue + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnvironmentDef): + return bool( + self.envName == other.envName and self.envValue == other.envValue + ) + return False + + def __hash__(self) -> int: + return hash((self.envName, self.envValue)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "EnvironmentDef": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("envName") is None: + raise ValidationException("missing required field `envName`", None, []) + + envName = load_field( + _doc.get("envName"), + strtype, + baseuri, + 
loadingOptions, + lc=_doc.get("envName") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `envName`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("envName") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `envName` field is not valid because:", + SourceLine(_doc, "envName", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `envName` field is not valid because:", + SourceLine(_doc, "envName", str), + [e], + detailed_message=f"the `envName` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("envValue") is None: + raise ValidationException("missing required field `envValue`", None, []) + + envValue = load_field( + _doc.get("envValue"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("envValue") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `envValue`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("envValue") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `envValue` field is not valid because:", + SourceLine(_doc, "envValue", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + 
f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `envValue` field is not valid because:", + SourceLine(_doc, "envValue", str), + [e], + detailed_message=f"the `envValue` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `envName`, `envValue`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + envName=envName, + envValue=envValue, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.envName is not None: + r["envName"] = save( + self.envName, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.envValue is not None: + r["envValue"] = save( + self.envValue, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["envName", "envValue"]) + + +class CommandLineBinding(InputBinding): + """ + + When listed under `inputBinding` in the input schema, the 
term + "value" refers to the corresponding value in the input object. For + binding objects listed in `CommandLineTool.arguments`, the term "value" + refers to the effective value after evaluating `valueFrom`. + + The binding behavior when building the command line depends on the data + type of the value. If there is a mismatch between the type described by + the input schema and the effective value, such as resulting from an + expression evaluation, an implementation must use the data type of the + effective value. + + - **string**: Add `prefix` and the string to the command line. + + - **number**: Add `prefix` and decimal representation to command line. + + - **boolean**: If true, add `prefix` to the command line. If false, add + nothing. + + - **File**: Add `prefix` and the value of + [`File.path`](#File) to the command line. + + - **Directory**: Add `prefix` and the value of + [`Directory.path`](#Directory) to the command line. + + - **array**: If `itemSeparator` is specified, add `prefix` and the join + the array into a single string with `itemSeparator` separating the + items. Otherwise, first add `prefix`, then recursively process + individual elements. + If the array is empty, it does not add anything to command line. + + - **object**: Add `prefix` only, and recursively add object fields for + which `inputBinding` is specified. + + - **null**: Add nothing. 
+ + """ + + def __init__( + self, + loadContents: Optional[Any] = None, + position: Optional[Any] = None, + prefix: Optional[Any] = None, + separate: Optional[Any] = None, + itemSeparator: Optional[Any] = None, + valueFrom: Optional[Any] = None, + shellQuote: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.loadContents = loadContents + self.position = position + self.prefix = prefix + self.separate = separate + self.itemSeparator = itemSeparator + self.valueFrom = valueFrom + self.shellQuote = shellQuote + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandLineBinding): + return bool( + self.loadContents == other.loadContents + and self.position == other.position + and self.prefix == other.prefix + and self.separate == other.separate + and self.itemSeparator == other.itemSeparator + and self.valueFrom == other.valueFrom + and self.shellQuote == other.shellQuote + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.loadContents, + self.position, + self.prefix, + self.separate, + self.itemSeparator, + self.valueFrom, + self.shellQuote, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "CommandLineBinding": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + loadContents = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("loadContents") + ) + + except ValidationException as e: + error_message, to_print, 
verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadContents`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadContents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + detailed_message=f"the `loadContents` field with value `{val}` " + "is not valid because:", + ) + ) + position = None + if "position" in _doc: + try: + position = load_field( + _doc.get("position"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("position") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `position`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("position") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `position` field is not valid because:", + SourceLine(_doc, "position", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `position` field is not valid because:", 
+ SourceLine(_doc, "position", str), + [e], + detailed_message=f"the `position` field with value `{val}` " + "is not valid because:", + ) + ) + prefix = None + if "prefix" in _doc: + try: + prefix = load_field( + _doc.get("prefix"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("prefix") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `prefix`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("prefix") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `prefix` field is not valid because:", + SourceLine(_doc, "prefix", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `prefix` field is not valid because:", + SourceLine(_doc, "prefix", str), + [e], + detailed_message=f"the `prefix` field with value `{val}` " + "is not valid because:", + ) + ) + separate = None + if "separate" in _doc: + try: + separate = load_field( + _doc.get("separate"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("separate") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `separate`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("separate") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `separate` field is not valid because:", + SourceLine(_doc, "separate", str), + [ValidationException(f"Value is a 
{val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `separate` field is not valid because:", + SourceLine(_doc, "separate", str), + [e], + detailed_message=f"the `separate` field with value `{val}` " + "is not valid because:", + ) + ) + itemSeparator = None + if "itemSeparator" in _doc: + try: + itemSeparator = load_field( + _doc.get("itemSeparator"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("itemSeparator") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `itemSeparator`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("itemSeparator") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `itemSeparator` field is not valid because:", + SourceLine(_doc, "itemSeparator", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `itemSeparator` field is not valid because:", + SourceLine(_doc, "itemSeparator", str), + [e], + detailed_message=f"the `itemSeparator` field with value `{val}` " + "is not valid because:", + ) + ) + valueFrom = None + if "valueFrom" in _doc: + try: + valueFrom = load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("valueFrom") + ) + + except ValidationException as e: + error_message, to_print, 
verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `valueFrom`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("valueFrom") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [e], + detailed_message=f"the `valueFrom` field with value `{val}` " + "is not valid because:", + ) + ) + shellQuote = None + if "shellQuote" in _doc: + try: + shellQuote = load_field( + _doc.get("shellQuote"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("shellQuote") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `shellQuote`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("shellQuote") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `shellQuote` field is not valid because:", + SourceLine(_doc, "shellQuote", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `shellQuote` field is not valid because:", + SourceLine(_doc, 
"shellQuote", str), + [e], + detailed_message=f"the `shellQuote` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `loadContents`, `position`, `prefix`, `separate`, `itemSeparator`, `valueFrom`, `shellQuote`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + loadContents=loadContents, + position=position, + prefix=prefix, + separate=separate, + itemSeparator=itemSeparator, + valueFrom=valueFrom, + shellQuote=shellQuote, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.position is not None: + r["position"] = save( + self.position, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.prefix is not None: + r["prefix"] = save( + self.prefix, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.separate is not None: + r["separate"] = save( + self.separate, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.itemSeparator is not None: + r["itemSeparator"] = 
save( + self.itemSeparator, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.valueFrom is not None: + r["valueFrom"] = save( + self.valueFrom, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.shellQuote is not None: + r["shellQuote"] = save( + self.shellQuote, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "loadContents", + "position", + "prefix", + "separate", + "itemSeparator", + "valueFrom", + "shellQuote", + ] + ) + + +class CommandOutputBinding(LoadContents): + """ + Describes how to generate an output parameter based on the files produced + by a CommandLineTool. + + The output parameter value is generated by applying these operations in the + following order: + + - glob + - loadContents + - outputEval + - secondaryFiles + + """ + + def __init__( + self, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + glob: Optional[Any] = None, + outputEval: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.loadContents = loadContents + self.loadListing = loadListing + self.glob = glob + self.outputEval = outputEval + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputBinding): + return bool( + self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.glob == other.glob + and self.outputEval == other.outputEval + ) + return False + + def 
__hash__(self) -> int: + return hash((self.loadContents, self.loadListing, self.glob, self.outputEval)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "CommandOutputBinding": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + loadContents = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("loadContents") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadContents`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadContents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + detailed_message=f"the `loadContents` field with value `{val}` " + "is not valid because:", + ) + ) + loadListing = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + lc=_doc.get("loadListing") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field 
`loadListing`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadListing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + detailed_message=f"the `loadListing` field with value `{val}` " + "is not valid because:", + ) + ) + glob = None + if "glob" in _doc: + try: + glob = load_field( + _doc.get("glob"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("glob") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `glob`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("glob") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `glob` field is not valid because:", + SourceLine(_doc, "glob", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `glob` field is not valid because:", + SourceLine(_doc, "glob", str), + [e], + detailed_message=f"the `glob` field with value `{val}` " + "is 
not valid because:", + ) + ) + outputEval = None + if "outputEval" in _doc: + try: + outputEval = load_field( + _doc.get("outputEval"), + union_of_None_type_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("outputEval") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outputEval`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outputEval") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputEval` field is not valid because:", + SourceLine(_doc, "outputEval", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outputEval` field is not valid because:", + SourceLine(_doc, "outputEval", str), + [e], + detailed_message=f"the `outputEval` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `loadContents`, `loadListing`, `glob`, `outputEval`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + loadContents=loadContents, + loadListing=loadListing, + glob=glob, + outputEval=outputEval, + extension_fields=extension_fields, + 
loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.glob is not None: + r["glob"] = save( + self.glob, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.outputEval is not None: + r["outputEval"] = save( + self.outputEval, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["loadContents", "loadListing", "glob", "outputEval"]) + + +class CommandLineBindable(Saveable): + def __init__( + self, + inputBinding: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandLineBindable): + return bool(self.inputBinding == other.inputBinding) + return False + + def __hash__(self) -> int: + return hash((self.inputBinding)) + + 
@classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "CommandLineBindable": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + inputBinding = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputBinding") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputBinding`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("inputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + detailed_message=f"the `inputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + 
) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["inputBinding"]) + + +class CommandInputRecordField(InputRecordField, CommandLineBindable): + name: str + + def __init__( + self, + name: Any, + type_: Any, + doc: Optional[Any] = None, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.type_ = type_ + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + 
self.loadContents = loadContents + self.loadListing = loadListing + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type_ == other.type_ + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.doc, + self.name, + self.type_, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + self.loadContents, + self.loadListing, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "CommandInputRecordField": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + 
f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + _errors__.append(ValidationException("missing name")) + if not __original_name_is_none: + baseuri = cast(str, name) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + 
typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid 
{to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + streamable = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("streamable") + ) + + except ValidationException as e: 
+ error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `streamable`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("streamable") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + detailed_message=f"the `streamable` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the 
`format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + loadContents = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("loadContents") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadContents`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadContents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + detailed_message=f"the `loadContents` field with value `{val}` " + "is not valid because:", + ) + ) + loadListing = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + lc=_doc.get("loadListing") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadListing`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadListing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + 
ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + detailed_message=f"the `loadListing` field with value `{val}` " + "is not valid because:", + ) + ) + inputBinding = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputBinding") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputBinding`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("inputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + detailed_message=f"the `inputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + 
ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + doc=doc, + name=name, + type_=type_, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + loadContents=loadContents, + loadListing=loadListing, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + 
self.streamable, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.format is not None: + u = save_relative_uri(self.format, self.name, True, None, relative_uris) + r["format"] = u + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "doc", + "name", + "type", + "label", + "secondaryFiles", + "streamable", + "format", + "loadContents", + "loadListing", + "inputBinding", + ] + ) + + +class CommandInputRecordSchema( + InputRecordSchema, CommandInputSchema, CommandLineBindable +): + name: str + + def __init__( + self, + type_: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type_ = type_ + self.label = label + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, 
CommandInputRecordSchema): + return bool( + self.fields == other.fields + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.fields, + self.type_, + self.label, + self.doc, + self.name, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "CommandInputRecordSchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + 
str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + fields = None + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader, + baseuri, + loadingOptions, + lc=_doc.get("fields") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `fields`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("fields") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + detailed_message=f"the `fields` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + 
[ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + 
ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + inputBinding = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputBinding") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputBinding`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("inputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + detailed_message=f"the `inputBinding` field with value `{val}` " + "is not valid 
because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + fields=fields, + type_=type_, + label=label, + doc=doc, + name=name, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.fields is not None: + r["fields"] = save( + self.fields, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=self.name, + 
relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name", "inputBinding"]) + + +class CommandInputEnumSchema(InputEnumSchema, CommandInputSchema, CommandLineBindable): + name: str + + def __init__( + self, + symbols: Any, + type_: Any, + name: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.symbols = symbols + self.type_ = type_ + self.label = label + self.doc = doc + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputEnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.name, + self.symbols, + self.type_, + self.label, + self.doc, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "CommandInputEnumSchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = 
load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + try: + if _doc.get("symbols") is None: + raise ValidationException("missing required field `symbols`", None, []) + + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("symbols") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `symbols`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("symbols") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `symbols` field is not valid 
because:", + SourceLine(_doc, "symbols", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + detailed_message=f"the `symbols` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, 
verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + 
"is not valid because:", + ) + ) + inputBinding = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputBinding") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputBinding`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("inputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + detailed_message=f"the `inputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + name=name, + symbols=symbols, + type_=type_, + label=label, + doc=doc, + inputBinding=inputBinding, + 
extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.symbols is not None: + u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) + r["symbols"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type", "label", "doc", "inputBinding"]) + + +class CommandInputArraySchema( + InputArraySchema, CommandInputSchema, CommandLineBindable +): + name: str + + def __init__( + self, + items: Any, + type_: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) 
-> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type_ = type_ + self.label = label + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputArraySchema): + return bool( + self.items == other.items + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + (self.items, self.type_, self.label, self.doc, self.name, self.inputBinding) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "CommandInputArraySchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a 
{val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + try: + if _doc.get("items") is None: + raise ValidationException("missing required field `items`", None, []) + + items = load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("items") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `items`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("items") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + detailed_message=f"the `items` field with value `{val}` " + "is not 
valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + 
detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + inputBinding = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputBinding") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputBinding`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = 
_doc.get("inputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + detailed_message=f"the `inputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + items=items, + type_=type_, + label=label, + doc=doc, + name=name, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if 
self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.items is not None: + u = save_relative_uri(self.items, self.name, False, 2, relative_uris) + r["items"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name", "inputBinding"]) + + +class CommandOutputRecordField(OutputRecordField): + name: str + + def __init__( + self, + name: Any, + type_: Any, + doc: Optional[Any] = None, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + format: Optional[Any] = None, + outputBinding: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.type_ = type_ + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + self.outputBinding = outputBinding 
+ + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type_ == other.type_ + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + and self.outputBinding == other.outputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.doc, + self.name, + self.type_, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + self.outputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "CommandOutputRecordField": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value 
`{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + _errors__.append(ValidationException("missing name")) + if not __original_name_is_none: + baseuri = cast(str, name) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = 
parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid 
because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + streamable = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("streamable") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `streamable`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("streamable") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, 
"streamable", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + detailed_message=f"the `streamable` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + outputBinding = None + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("outputBinding") + ) + + except ValidationException 
as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outputBinding`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + detailed_message=f"the `outputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `outputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + doc=doc, + name=name, + type_=type_, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + outputBinding=outputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, 
base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.format is not None: + u = save_relative_uri(self.format, self.name, True, None, relative_uris) + r["format"] = u + if self.outputBinding is not None: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "doc", + "name", + "type", + "label", + "secondaryFiles", + "streamable", + "format", + "outputBinding", + ] + ) + + +class CommandOutputRecordSchema(OutputRecordSchema): + name: str + + def __init__( + self, + type_: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + 
extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type_ = type_ + self.label = label + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputRecordSchema): + return bool( + self.fields == other.fields + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type_, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "CommandOutputRecordSchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a 
{val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + fields = None + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, + baseuri, + loadingOptions, + lc=_doc.get("fields") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `fields`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("fields") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + detailed_message=f"the `fields` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except 
ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + 
detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + fields=fields, + type_=type_, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, 
name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.fields is not None: + r["fields"] = save( + self.fields, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name"]) + + +class CommandOutputEnumSchema(OutputEnumSchema): + name: str + + def __init__( + self, + symbols: Any, + type_: Any, + name: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.symbols = symbols + self.type_ = type_ + 
self.label = label + self.doc = doc + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputEnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.symbols, self.type_, self.label, self.doc)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "CommandOutputEnumSchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" 
+ str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + try: + if _doc.get("symbols") is None: + raise ValidationException("missing required field `symbols`", None, []) + + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("symbols") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `symbols`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("symbols") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + detailed_message=f"the `symbols` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + 
SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field 
`doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + name=name, + symbols=symbols, + type_=type_, + label=label, + doc=doc, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if 
self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.symbols is not None: + u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) + r["symbols"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type", "label", "doc"]) + + +class CommandOutputArraySchema(OutputArraySchema): + name: str + + def __init__( + self, + items: Any, + type_: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type_ = type_ + self.label = label + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputArraySchema): + return bool( + self.items == other.items + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type_, self.label, self.doc, self.name)) + + 
@classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "CommandOutputArraySchema": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + try: + if _doc.get("items") is None: + raise ValidationException("missing required field `items`", None, []) + + items = load_field( + _doc.get("items"), + 
uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("items") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `items`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("items") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + detailed_message=f"the `items` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + 
SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field 
`doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + items=items, + type_=type_, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is 
not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.items is not None: + u = save_relative_uri(self.items, self.name, False, 2, relative_uris) + r["items"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name"]) + + +class CommandInputParameter(InputParameter): + """ + An input parameter for a CommandLineTool. + """ + + id: str + + def __init__( + self, + type_: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + default: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + 
self.default = default + self.type_ = type_ + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default + and self.type_ == other.type_ + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.loadContents, + self.loadListing, + self.default, + self.type_, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "CommandInputParameter": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + id = None + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} 
for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = 
parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + streamable = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("streamable") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `streamable`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("streamable") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", 
+ SourceLine(_doc, "streamable", str), + [e], + detailed_message=f"the `streamable` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + 
[ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + loadContents = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("loadContents") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadContents`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadContents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + detailed_message=f"the `loadContents` field with value `{val}` " + "is not valid because:", + ) + ) + loadListing = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + lc=_doc.get("loadListing") + ) + + except ValidationException as e: + 
error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadListing`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadListing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + detailed_message=f"the `loadListing` field with value `{val}` " + "is not valid because:", + ) + ) + default = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_CWLObjectTypeLoader, + baseuri, + loadingOptions, + lc=_doc.get("default") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `default`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("default") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", 
+ SourceLine(_doc, "default", str), + [e], + detailed_message=f"the `default` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + inputBinding = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputBinding") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputBinding`": + 
_errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("inputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + detailed_message=f"the `inputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + loadContents=loadContents, + loadListing=loadListing, + default=default, + type_=type_, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", 
relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.default is not None: + r["default"] = save( + self.default, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + 
return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "loadContents", + "loadListing", + "default", + "type", + "inputBinding", + ] + ) + + +class CommandOutputParameter(OutputParameter): + """ + An output parameter for a CommandLineTool. + """ + + id: str + + def __init__( + self, + type_: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + outputBinding: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.format = format + self.type_ = type_ + self.outputBinding = outputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.type_ == other.type_ + and self.outputBinding == other.outputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.type_, + self.outputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "CommandOutputParameter": + _doc = copy.copy(doc) + + if hasattr(doc, 
"lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + id = None + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not 
valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + streamable = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + 
union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("streamable") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `streamable`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("streamable") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + detailed_message=f"the `streamable` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + 
else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + 
error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + outputBinding = None + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("outputBinding") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outputBinding`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outputBinding` field is not 
valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + detailed_message=f"the `outputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`, `outputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + type_=type_, + outputBinding=outputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.id, + 
relative_uris=relative_uris, + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.outputBinding is not None: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "type", + "outputBinding", + ] + ) + + +class CommandLineTool(Process): + """ + This defines the schema of the CWL Command Line Tool Description document. 
+ + """ + + id: str + + def __init__( + self, + inputs: Any, + outputs: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + intent: Optional[Any] = None, + baseCommand: Optional[Any] = None, + arguments: Optional[Any] = None, + stdin: Optional[Any] = None, + stderr: Optional[Any] = None, + stdout: Optional[Any] = None, + successCodes: Optional[Any] = None, + temporaryFailCodes: Optional[Any] = None, + permanentFailCodes: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_ = "CommandLineTool" + self.baseCommand = baseCommand + self.arguments = arguments + self.stdin = stdin + self.stderr = stderr + self.stdout = stdout + self.successCodes = successCodes + self.temporaryFailCodes = temporaryFailCodes + self.permanentFailCodes = permanentFailCodes + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandLineTool): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + and self.baseCommand == other.baseCommand + and self.arguments == 
other.arguments + and self.stdin == other.stdin + and self.stderr == other.stderr + and self.stdout == other.stdout + and self.successCodes == other.successCodes + and self.temporaryFailCodes == other.temporaryFailCodes + and self.permanentFailCodes == other.permanentFailCodes + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + self.baseCommand, + self.arguments, + self.stdin, + self.stderr, + self.stdout, + self.successCodes, + self.temporaryFailCodes, + self.permanentFailCodes, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "CommandLineTool": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + id = None + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + 
detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + uri_CommandLineTool_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + 
union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("inputs") is None: + raise ValidationException("missing required field `inputs`", None, []) + + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_CommandInputParameterLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputs") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputs`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("inputs") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for 
this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + detailed_message=f"the `inputs` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("outputs") is None: + raise ValidationException("missing required field `outputs`", None, []) + + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_CommandOutputParameterLoader, + baseuri, + loadingOptions, + lc=_doc.get("outputs") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outputs`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outputs") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + detailed_message=f"the `outputs` field with value `{val}` " + "is not valid because:", + ) + ) + requirements = None + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + lc=_doc.get("requirements") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `requirements`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("requirements") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + detailed_message=f"the `requirements` field with value `{val}` " + "is not valid because:", + ) + ) + hints = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + 
idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + lc=_doc.get("hints") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `hints`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("hints") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + detailed_message=f"the `hints` field with value `{val}` " + "is not valid because:", + ) + ) + cwlVersion = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("cwlVersion") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field 
`cwlVersion`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("cwlVersion") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + detailed_message=f"the `cwlVersion` field with value `{val}` " + "is not valid because:", + ) + ) + intent = None + if "intent" in _doc: + try: + intent = load_field( + _doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("intent") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `intent`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("intent") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", str), + [e], + detailed_message=f"the `intent` field with value 
`{val}` " + "is not valid because:", + ) + ) + baseCommand = None + if "baseCommand" in _doc: + try: + baseCommand = load_field( + _doc.get("baseCommand"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("baseCommand") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `baseCommand`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("baseCommand") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `baseCommand` field is not valid because:", + SourceLine(_doc, "baseCommand", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `baseCommand` field is not valid because:", + SourceLine(_doc, "baseCommand", str), + [e], + detailed_message=f"the `baseCommand` field with value `{val}` " + "is not valid because:", + ) + ) + arguments = None + if "arguments" in _doc: + try: + arguments = load_field( + _doc.get("arguments"), + union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("arguments") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `arguments`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("arguments") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `arguments` field is not valid because:", + SourceLine(_doc, 
"arguments", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `arguments` field is not valid because:", + SourceLine(_doc, "arguments", str), + [e], + detailed_message=f"the `arguments` field with value `{val}` " + "is not valid because:", + ) + ) + stdin = None + if "stdin" in _doc: + try: + stdin = load_field( + _doc.get("stdin"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("stdin") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `stdin`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("stdin") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `stdin` field is not valid because:", + SourceLine(_doc, "stdin", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `stdin` field is not valid because:", + SourceLine(_doc, "stdin", str), + [e], + detailed_message=f"the `stdin` field with value `{val}` " + "is not valid because:", + ) + ) + stderr = None + if "stderr" in _doc: + try: + stderr = load_field( + _doc.get("stderr"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("stderr") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + 
if str(e) == "missing required field `stderr`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("stderr") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `stderr` field is not valid because:", + SourceLine(_doc, "stderr", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `stderr` field is not valid because:", + SourceLine(_doc, "stderr", str), + [e], + detailed_message=f"the `stderr` field with value `{val}` " + "is not valid because:", + ) + ) + stdout = None + if "stdout" in _doc: + try: + stdout = load_field( + _doc.get("stdout"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("stdout") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `stdout`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("stdout") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `stdout` field is not valid because:", + SourceLine(_doc, "stdout", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `stdout` field is not valid because:", + SourceLine(_doc, "stdout", str), + [e], + detailed_message=f"the `stdout` field with value `{val}` " 
+ "is not valid because:", + ) + ) + successCodes = None + if "successCodes" in _doc: + try: + successCodes = load_field( + _doc.get("successCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + lc=_doc.get("successCodes") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `successCodes`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("successCodes") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `successCodes` field is not valid because:", + SourceLine(_doc, "successCodes", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `successCodes` field is not valid because:", + SourceLine(_doc, "successCodes", str), + [e], + detailed_message=f"the `successCodes` field with value `{val}` " + "is not valid because:", + ) + ) + temporaryFailCodes = None + if "temporaryFailCodes" in _doc: + try: + temporaryFailCodes = load_field( + _doc.get("temporaryFailCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + lc=_doc.get("temporaryFailCodes") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `temporaryFailCodes`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("temporaryFailCodes") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `temporaryFailCodes` field is not valid because:", + SourceLine(_doc, 
"temporaryFailCodes", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `temporaryFailCodes` field is not valid because:", + SourceLine(_doc, "temporaryFailCodes", str), + [e], + detailed_message=f"the `temporaryFailCodes` field with value `{val}` " + "is not valid because:", + ) + ) + permanentFailCodes = None + if "permanentFailCodes" in _doc: + try: + permanentFailCodes = load_field( + _doc.get("permanentFailCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + lc=_doc.get("permanentFailCodes") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `permanentFailCodes`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("permanentFailCodes") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `permanentFailCodes` field is not valid because:", + SourceLine(_doc, "permanentFailCodes", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `permanentFailCodes` field is not valid because:", + SourceLine(_doc, "permanentFailCodes", str), + [e], + detailed_message=f"the `permanentFailCodes` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + 
ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `baseCommand`, `arguments`, `stdin`, `stderr`, `stdout`, `successCodes`, `temporaryFailCodes`, `permanentFailCodes`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + baseCommand=baseCommand, + arguments=arguments, + stdin=stdin, + stderr=stderr, + stdout=stdout, + successCodes=successCodes, + temporaryFailCodes=temporaryFailCodes, + permanentFailCodes=permanentFailCodes, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, self.id, False, None, relative_uris) + r["class"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, 
base_url=self.id, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.inputs is not None: + r["inputs"] = save( + self.inputs, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.outputs is not None: + r["outputs"] = save( + self.outputs, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.requirements is not None: + r["requirements"] = save( + self.requirements, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.hints is not None: + r["hints"] = save( + self.hints, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.cwlVersion is not None: + u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) + r["cwlVersion"] = u + if self.intent is not None: + u = save_relative_uri(self.intent, self.id, True, None, relative_uris) + r["intent"] = u + if self.baseCommand is not None: + r["baseCommand"] = save( + self.baseCommand, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.arguments is not None: + r["arguments"] = save( + self.arguments, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.stdin is not None: + r["stdin"] = save( + self.stdin, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.stderr is not None: + r["stderr"] = save( + self.stderr, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.stdout is not None: + r["stdout"] = save( + self.stdout, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.successCodes is not None: + r["successCodes"] = save( + self.successCodes, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.temporaryFailCodes is not None: + r["temporaryFailCodes"] = save( + self.temporaryFailCodes, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.permanentFailCodes is not None: + 
r["permanentFailCodes"] = save( + self.permanentFailCodes, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + "baseCommand", + "arguments", + "stdin", + "stderr", + "stdout", + "successCodes", + "temporaryFailCodes", + "permanentFailCodes", + ] + ) + + +class DockerRequirement(ProcessRequirement): + """ + Indicates that a workflow component should be run in a + [Docker](https://docker.com) or Docker-compatible (such as + [Singularity](https://www.sylabs.io/) and [udocker](https://github.com/indigo-dc/udocker)) container environment and + specifies how to fetch or build the image. + + If a CommandLineTool lists `DockerRequirement` under + `hints` (or `requirements`), it may (or must) be run in the specified Docker + container. + + The platform must first acquire or install the correct Docker image as + specified by `dockerPull`, `dockerImport`, `dockerLoad` or `dockerFile`. + + The platform must execute the tool in the container using `docker run` with + the appropriate Docker image and tool command line. + + The workflow platform may provide input files and the designated output + directory through the use of volume bind mounts. The platform should rewrite + file paths in the input object to correspond to the Docker bind mounted + locations. That is, the platform should rewrite values in the parameter context + such as `runtime.outdir`, `runtime.tmpdir` and others to be valid paths + within the container. The platform must ensure that `runtime.outdir` and + `runtime.tmpdir` are distinct directories. 
+ + When running a tool contained in Docker, the workflow platform must not + assume anything about the contents of the Docker container, such as the + presence or absence of specific software, except to assume that the + generated command line represents a valid command within the runtime + environment of the container. + + A container image may specify an + [ENTRYPOINT](https://docs.docker.com/engine/reference/builder/#entrypoint) + and/or + [CMD](https://docs.docker.com/engine/reference/builder/#cmd). + Command line arguments will be appended after all elements of + ENTRYPOINT, and will override all elements specified using CMD (in + other words, CMD is only used when the CommandLineTool definition + produces an empty command line). + + Use of implicit ENTRYPOINT or CMD are discouraged due to reproducibility + concerns of the implicit hidden execution point (For further discussion, see + [https://doi.org/10.12688/f1000research.15140.1](https://doi.org/10.12688/f1000research.15140.1)). Portable + CommandLineTool wrappers in which use of a container is optional must not rely on ENTRYPOINT or CMD. + CommandLineTools which do rely on ENTRYPOINT or CMD must list `DockerRequirement` in the + `requirements` section. + + ## Interaction with other requirements + + If [EnvVarRequirement](#EnvVarRequirement) is specified alongside a + DockerRequirement, the environment variables must be provided to Docker + using `--env` or `--env-file` and interact with the container's preexisting + environment as defined by Docker. 
+ + """ + + def __init__( + self, + dockerPull: Optional[Any] = None, + dockerLoad: Optional[Any] = None, + dockerFile: Optional[Any] = None, + dockerImport: Optional[Any] = None, + dockerImageId: Optional[Any] = None, + dockerOutputDirectory: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "DockerRequirement" + self.dockerPull = dockerPull + self.dockerLoad = dockerLoad + self.dockerFile = dockerFile + self.dockerImport = dockerImport + self.dockerImageId = dockerImageId + self.dockerOutputDirectory = dockerOutputDirectory + + def __eq__(self, other: Any) -> bool: + if isinstance(other, DockerRequirement): + return bool( + self.class_ == other.class_ + and self.dockerPull == other.dockerPull + and self.dockerLoad == other.dockerLoad + and self.dockerFile == other.dockerFile + and self.dockerImport == other.dockerImport + and self.dockerImageId == other.dockerImageId + and self.dockerOutputDirectory == other.dockerOutputDirectory + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.class_, + self.dockerPull, + self.dockerLoad, + self.dockerFile, + self.dockerImport, + self.dockerImageId, + self.dockerOutputDirectory, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "DockerRequirement": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + 
uri_DockerRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + dockerPull = None + if "dockerPull" in _doc: + try: + dockerPull = load_field( + _doc.get("dockerPull"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("dockerPull") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `dockerPull`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("dockerPull") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `dockerPull` field is not valid because:", + SourceLine(_doc, "dockerPull", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `dockerPull` field is not valid because:", + SourceLine(_doc, "dockerPull", str), + [e], + detailed_message=f"the `dockerPull` field with value `{val}` " + "is not valid because:", + ) + ) + dockerLoad = None + if "dockerLoad" in _doc: + try: + dockerLoad = load_field( + _doc.get("dockerLoad"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("dockerLoad") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `dockerLoad`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("dockerLoad") + if error_message != str(e): + val_type = 
convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `dockerLoad` field is not valid because:", + SourceLine(_doc, "dockerLoad", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `dockerLoad` field is not valid because:", + SourceLine(_doc, "dockerLoad", str), + [e], + detailed_message=f"the `dockerLoad` field with value `{val}` " + "is not valid because:", + ) + ) + dockerFile = None + if "dockerFile" in _doc: + try: + dockerFile = load_field( + _doc.get("dockerFile"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("dockerFile") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `dockerFile`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("dockerFile") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `dockerFile` field is not valid because:", + SourceLine(_doc, "dockerFile", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `dockerFile` field is not valid because:", + SourceLine(_doc, "dockerFile", str), + [e], + detailed_message=f"the `dockerFile` field with value `{val}` " + "is not valid because:", + ) + ) + dockerImport = None + if "dockerImport" in _doc: + try: + dockerImport = load_field( + 
_doc.get("dockerImport"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("dockerImport") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `dockerImport`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("dockerImport") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `dockerImport` field is not valid because:", + SourceLine(_doc, "dockerImport", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `dockerImport` field is not valid because:", + SourceLine(_doc, "dockerImport", str), + [e], + detailed_message=f"the `dockerImport` field with value `{val}` " + "is not valid because:", + ) + ) + dockerImageId = None + if "dockerImageId" in _doc: + try: + dockerImageId = load_field( + _doc.get("dockerImageId"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("dockerImageId") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `dockerImageId`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("dockerImageId") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `dockerImageId` field is not valid because:", + SourceLine(_doc, "dockerImageId", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value 
`{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `dockerImageId` field is not valid because:", + SourceLine(_doc, "dockerImageId", str), + [e], + detailed_message=f"the `dockerImageId` field with value `{val}` " + "is not valid because:", + ) + ) + dockerOutputDirectory = None + if "dockerOutputDirectory" in _doc: + try: + dockerOutputDirectory = load_field( + _doc.get("dockerOutputDirectory"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("dockerOutputDirectory") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `dockerOutputDirectory`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("dockerOutputDirectory") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `dockerOutputDirectory` field is not valid because:", + SourceLine(_doc, "dockerOutputDirectory", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `dockerOutputDirectory` field is not valid because:", + SourceLine(_doc, "dockerOutputDirectory", str), + [e], + detailed_message=f"the `dockerOutputDirectory` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + 
extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `dockerPull`, `dockerLoad`, `dockerFile`, `dockerImport`, `dockerImageId`, `dockerOutputDirectory`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + dockerPull=dockerPull, + dockerLoad=dockerLoad, + dockerFile=dockerFile, + dockerImport=dockerImport, + dockerImageId=dockerImageId, + dockerOutputDirectory=dockerOutputDirectory, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.dockerPull is not None: + r["dockerPull"] = save( + self.dockerPull, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.dockerLoad is not None: + r["dockerLoad"] = save( + self.dockerLoad, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.dockerFile is not None: + r["dockerFile"] = save( + self.dockerFile, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.dockerImport is not None: + r["dockerImport"] = save( + self.dockerImport, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.dockerImageId is not None: + r["dockerImageId"] = save( + self.dockerImageId, + top=False, + 
base_url=base_url, + relative_uris=relative_uris, + ) + if self.dockerOutputDirectory is not None: + r["dockerOutputDirectory"] = save( + self.dockerOutputDirectory, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "class", + "dockerPull", + "dockerLoad", + "dockerFile", + "dockerImport", + "dockerImageId", + "dockerOutputDirectory", + ] + ) + + +class SoftwareRequirement(ProcessRequirement): + """ + A list of software packages that should be configured in the environment of + the defined process. + + """ + + def __init__( + self, + packages: Any, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SoftwareRequirement" + self.packages = packages + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SoftwareRequirement): + return bool(self.class_ == other.class_ and self.packages == other.packages) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.packages)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "SoftwareRequirement": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + 
uri_SoftwareRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + try: + if _doc.get("packages") is None: + raise ValidationException("missing required field `packages`", None, []) + + packages = load_field( + _doc.get("packages"), + idmap_packages_array_of_SoftwarePackageLoader, + baseuri, + loadingOptions, + lc=_doc.get("packages") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `packages`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("packages") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `packages` field is not valid because:", + SourceLine(_doc, "packages", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `packages` field is not valid because:", + SourceLine(_doc, "packages", str), + [e], + detailed_message=f"the `packages` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `packages`".format( + k + ), + 
SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + packages=packages, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.packages is not None: + r["packages"] = save( + self.packages, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "packages"]) + + +class SoftwarePackage(Saveable): + def __init__( + self, + package: Any, + version: Optional[Any] = None, + specs: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.package = package + self.version = version + self.specs = specs + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SoftwarePackage): + return bool( + self.package == other.package + and self.version == 
other.version + and self.specs == other.specs + ) + return False + + def __hash__(self) -> int: + return hash((self.package, self.version, self.specs)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "SoftwarePackage": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("package") is None: + raise ValidationException("missing required field `package`", None, []) + + package = load_field( + _doc.get("package"), + strtype, + baseuri, + loadingOptions, + lc=_doc.get("package") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `package`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("package") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `package` field is not valid because:", + SourceLine(_doc, "package", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `package` field is not valid because:", + SourceLine(_doc, "package", str), + [e], + detailed_message=f"the `package` field with value `{val}` " + "is not valid because:", + ) + ) + version = None + if "version" in _doc: + try: + version = load_field( + _doc.get("version"), + union_of_None_type_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("version") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field 
`version`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("version") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `version` field is not valid because:", + SourceLine(_doc, "version", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `version` field is not valid because:", + SourceLine(_doc, "version", str), + [e], + detailed_message=f"the `version` field with value `{val}` " + "is not valid because:", + ) + ) + specs = None + if "specs" in _doc: + try: + specs = load_field( + _doc.get("specs"), + uri_union_of_None_type_or_array_of_strtype_False_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("specs") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `specs`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("specs") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `specs` field is not valid because:", + SourceLine(_doc, "specs", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `specs` field is not valid because:", + SourceLine(_doc, "specs", str), + [e], + detailed_message=f"the `specs` field with value `{val}` " + "is not valid because:", 
+ ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `package`, `version`, `specs`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + package=package, + version=version, + specs=specs, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.package is not None: + r["package"] = save( + self.package, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.version is not None: + r["version"] = save( + self.version, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.specs is not None: + u = save_relative_uri(self.specs, base_url, False, None, relative_uris) + r["specs"] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["package", "version", "specs"]) + + +class Dirent(Saveable): + """ + Define a file or subdirectory that must be staged to a particular + place prior to executing the command line tool. 
May be the result + of executing an expression, such as building a configuration file + from a template. + + Usually files are staged within the [designated output directory](#Runtime_environment). + However, under certain circumstances, files may be staged at + arbitrary locations, see discussion for `entryname`. + + """ + + def __init__( + self, + entry: Any, + entryname: Optional[Any] = None, + writable: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.entryname = entryname + self.entry = entry + self.writable = writable + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Dirent): + return bool( + self.entryname == other.entryname + and self.entry == other.entry + and self.writable == other.writable + ) + return False + + def __hash__(self) -> int: + return hash((self.entryname, self.entry, self.writable)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "Dirent": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + entryname = None + if "entryname" in _doc: + try: + entryname = load_field( + _doc.get("entryname"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("entryname") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `entryname`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("entryname") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) 
+ _errors__.append( + ValidationException( + "the `entryname` field is not valid because:", + SourceLine(_doc, "entryname", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `entryname` field is not valid because:", + SourceLine(_doc, "entryname", str), + [e], + detailed_message=f"the `entryname` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("entry") is None: + raise ValidationException("missing required field `entry`", None, []) + + entry = load_field( + _doc.get("entry"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("entry") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `entry`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("entry") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `entry` field is not valid because:", + SourceLine(_doc, "entry", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `entry` field is not valid because:", + SourceLine(_doc, "entry", str), + [e], + detailed_message=f"the `entry` field with value `{val}` " + "is not valid because:", + ) + ) + writable = None + if "writable" in _doc: + try: + writable = load_field( + _doc.get("writable"), + union_of_None_type_or_booltype, + 
baseuri, + loadingOptions, + lc=_doc.get("writable") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `writable`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("writable") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `writable` field is not valid because:", + SourceLine(_doc, "writable", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `writable` field is not valid because:", + SourceLine(_doc, "writable", str), + [e], + detailed_message=f"the `writable` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `entryname`, `entry`, `writable`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + entryname=entryname, + entry=entry, + writable=writable, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, 
self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.entryname is not None: + r["entryname"] = save( + self.entryname, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.entry is not None: + r["entry"] = save( + self.entry, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.writable is not None: + r["writable"] = save( + self.writable, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["entryname", "entry", "writable"]) + + +class InitialWorkDirRequirement(ProcessRequirement): + """ + Define a list of files and subdirectories that must be staged by the workflow platform prior to executing the command line tool. + Normally files are staged within the designated output directory. However, when running inside containers, files may be staged at arbitrary locations, see discussion for [`Dirent.entryname`](#Dirent). Together with `DockerRequirement.dockerOutputDirectory` it is possible to control the locations of both input and output files when running in containers. 
+ """ + + def __init__( + self, + listing: Any, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "InitialWorkDirRequirement" + self.listing = listing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InitialWorkDirRequirement): + return bool(self.class_ == other.class_ and self.listing == other.listing) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.listing)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "InitialWorkDirRequirement": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + uri_InitialWorkDirRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + try: + if _doc.get("listing") is None: + raise ValidationException("missing required field `listing`", None, []) + + listing = load_field( + _doc.get("listing"), + union_of_ExpressionLoader_or_array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + lc=_doc.get("listing") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == 
"missing required field `listing`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("listing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [e], + detailed_message=f"the `listing` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `listing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + listing=listing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := 
self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.listing is not None: + r["listing"] = save( + self.listing, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "listing"]) + + +class EnvVarRequirement(ProcessRequirement): + """ + Define a list of environment variables which will be set in the + execution environment of the tool. See `EnvironmentDef` for details. + + """ + + def __init__( + self, + envDef: Any, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "EnvVarRequirement" + self.envDef = envDef + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnvVarRequirement): + return bool(self.class_ == other.class_ and self.envDef == other.envDef) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.envDef)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "EnvVarRequirement": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + 
uri_EnvVarRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + try: + if _doc.get("envDef") is None: + raise ValidationException("missing required field `envDef`", None, []) + + envDef = load_field( + _doc.get("envDef"), + idmap_envDef_array_of_EnvironmentDefLoader, + baseuri, + loadingOptions, + lc=_doc.get("envDef") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `envDef`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("envDef") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `envDef` field is not valid because:", + SourceLine(_doc, "envDef", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `envDef` field is not valid because:", + SourceLine(_doc, "envDef", str), + [e], + detailed_message=f"the `envDef` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `envDef`".format( + k + ), + SourceLine(_doc, k, str), + ) + 
) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + envDef=envDef, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.envDef is not None: + r["envDef"] = save( + self.envDef, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "envDef"]) + + +class ShellCommandRequirement(ProcessRequirement): + """ + Modify the behavior of CommandLineTool to generate a single string + containing a shell command line. Each item in the `arguments` list must + be joined into a string separated by single spaces and quoted to prevent + interpretation by the shell, unless `CommandLineBinding` for that argument + contains `shellQuote: false`. If `shellQuote: false` is specified, the + argument is joined into the command string without quoting, which allows + the use of shell metacharacters such as `|` for pipes. 
+ + """ + + def __init__( + self, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ShellCommandRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ShellCommandRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "ShellCommandRequirement": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + uri_ShellCommandRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + extension_fields=extension_fields, + 
loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class ResourceRequirement(ProcessRequirement): + """ + Specify basic hardware resource requirements. + + "min" is the minimum amount of a resource that must be reserved to + schedule a job. If "min" cannot be satisfied, the job should not + be run. + + "max" is the maximum amount of a resource that the job shall be + allocated. If a node has sufficient resources, multiple jobs may + be scheduled on a single node provided each job's "max" resource + requirements are met. If a job attempts to exceed its resource + allocation, an implementation may deny additional resources, which + may result in job failure. + + If both "min" and "max" are specified, an implementation may + choose to allocate any amount between "min" and "max", with the + actual allocation provided in the `runtime` object. + + If "min" is specified but "max" is not, then "max" == "min" + If "max" is specified by "min" is not, then "min" == "max". + + It is an error if max < min. + + It is an error if the value of any of these fields is negative. 
+ + If neither "min" nor "max" is specified for a resource, use the default values below. + + """ + + def __init__( + self, + coresMin: Optional[Any] = None, + coresMax: Optional[Any] = None, + ramMin: Optional[Any] = None, + ramMax: Optional[Any] = None, + tmpdirMin: Optional[Any] = None, + tmpdirMax: Optional[Any] = None, + outdirMin: Optional[Any] = None, + outdirMax: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ResourceRequirement" + self.coresMin = coresMin + self.coresMax = coresMax + self.ramMin = ramMin + self.ramMax = ramMax + self.tmpdirMin = tmpdirMin + self.tmpdirMax = tmpdirMax + self.outdirMin = outdirMin + self.outdirMax = outdirMax + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ResourceRequirement): + return bool( + self.class_ == other.class_ + and self.coresMin == other.coresMin + and self.coresMax == other.coresMax + and self.ramMin == other.ramMin + and self.ramMax == other.ramMax + and self.tmpdirMin == other.tmpdirMin + and self.tmpdirMax == other.tmpdirMax + and self.outdirMin == other.outdirMin + and self.outdirMax == other.outdirMax + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.class_, + self.coresMin, + self.coresMax, + self.ramMin, + self.ramMax, + self.tmpdirMin, + self.tmpdirMax, + self.outdirMin, + self.outdirMax, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "ResourceRequirement": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise 
ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + uri_ResourceRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + coresMin = None + if "coresMin" in _doc: + try: + coresMin = load_field( + _doc.get("coresMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("coresMin") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `coresMin`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("coresMin") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `coresMin` field is not valid because:", + SourceLine(_doc, "coresMin", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `coresMin` field is not valid because:", + SourceLine(_doc, "coresMin", str), + [e], + detailed_message=f"the `coresMin` field with value `{val}` " + "is not valid because:", + ) + ) + coresMax = None + if "coresMax" in _doc: + try: + coresMax = load_field( + _doc.get("coresMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("coresMax") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `coresMax`": + 
_errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("coresMax") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `coresMax` field is not valid because:", + SourceLine(_doc, "coresMax", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `coresMax` field is not valid because:", + SourceLine(_doc, "coresMax", str), + [e], + detailed_message=f"the `coresMax` field with value `{val}` " + "is not valid because:", + ) + ) + ramMin = None + if "ramMin" in _doc: + try: + ramMin = load_field( + _doc.get("ramMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("ramMin") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `ramMin`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("ramMin") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `ramMin` field is not valid because:", + SourceLine(_doc, "ramMin", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `ramMin` field is not valid because:", + SourceLine(_doc, "ramMin", str), + [e], + detailed_message=f"the `ramMin` field with value `{val}` " + "is not valid 
because:", + ) + ) + ramMax = None + if "ramMax" in _doc: + try: + ramMax = load_field( + _doc.get("ramMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("ramMax") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `ramMax`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("ramMax") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `ramMax` field is not valid because:", + SourceLine(_doc, "ramMax", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `ramMax` field is not valid because:", + SourceLine(_doc, "ramMax", str), + [e], + detailed_message=f"the `ramMax` field with value `{val}` " + "is not valid because:", + ) + ) + tmpdirMin = None + if "tmpdirMin" in _doc: + try: + tmpdirMin = load_field( + _doc.get("tmpdirMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("tmpdirMin") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `tmpdirMin`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("tmpdirMin") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `tmpdirMin` field is not valid because:", + SourceLine(_doc, "tmpdirMin", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + 
f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `tmpdirMin` field is not valid because:", + SourceLine(_doc, "tmpdirMin", str), + [e], + detailed_message=f"the `tmpdirMin` field with value `{val}` " + "is not valid because:", + ) + ) + tmpdirMax = None + if "tmpdirMax" in _doc: + try: + tmpdirMax = load_field( + _doc.get("tmpdirMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("tmpdirMax") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `tmpdirMax`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("tmpdirMax") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `tmpdirMax` field is not valid because:", + SourceLine(_doc, "tmpdirMax", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `tmpdirMax` field is not valid because:", + SourceLine(_doc, "tmpdirMax", str), + [e], + detailed_message=f"the `tmpdirMax` field with value `{val}` " + "is not valid because:", + ) + ) + outdirMin = None + if "outdirMin" in _doc: + try: + outdirMin = load_field( + _doc.get("outdirMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("outdirMin") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing 
required field `outdirMin`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outdirMin") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outdirMin` field is not valid because:", + SourceLine(_doc, "outdirMin", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outdirMin` field is not valid because:", + SourceLine(_doc, "outdirMin", str), + [e], + detailed_message=f"the `outdirMin` field with value `{val}` " + "is not valid because:", + ) + ) + outdirMax = None + if "outdirMax" in _doc: + try: + outdirMax = load_field( + _doc.get("outdirMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("outdirMax") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outdirMax`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outdirMax") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outdirMax` field is not valid because:", + SourceLine(_doc, "outdirMax", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outdirMax` field is not valid because:", + SourceLine(_doc, "outdirMax", str), + [e], + 
detailed_message=f"the `outdirMax` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `coresMin`, `coresMax`, `ramMin`, `ramMax`, `tmpdirMin`, `tmpdirMax`, `outdirMin`, `outdirMax`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + coresMin=coresMin, + coresMax=coresMax, + ramMin=ramMin, + ramMax=ramMax, + tmpdirMin=tmpdirMin, + tmpdirMax=tmpdirMax, + outdirMin=outdirMin, + outdirMax=outdirMax, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.coresMin is not None: + r["coresMin"] = save( + self.coresMin, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.coresMax is not None: + r["coresMax"] = save( + self.coresMax, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.ramMin is not None: + r["ramMin"] = save( + self.ramMin, top=False, 
base_url=base_url, relative_uris=relative_uris + ) + if self.ramMax is not None: + r["ramMax"] = save( + self.ramMax, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.tmpdirMin is not None: + r["tmpdirMin"] = save( + self.tmpdirMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.tmpdirMax is not None: + r["tmpdirMax"] = save( + self.tmpdirMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.outdirMin is not None: + r["outdirMin"] = save( + self.outdirMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.outdirMax is not None: + r["outdirMax"] = save( + self.outdirMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "class", + "coresMin", + "coresMax", + "ramMin", + "ramMax", + "tmpdirMin", + "tmpdirMax", + "outdirMin", + "outdirMax", + ] + ) + + +class WorkReuse(ProcessRequirement): + """ + For implementations that support reusing output from past work (on + the assumption that same code and same input produce same + results), control whether to enable or disable the reuse behavior + for a particular tool or step (to accommodate situations where that + assumption is incorrect). A reused step is not executed but + instead returns the same output as the original execution. + + If `WorkReuse` is not specified, correct tools should assume it + is enabled by default. 
class WorkReuse(ProcessRequirement):
    """
    For implementations that support reusing output from past work (on
    the assumption that same code and same input produce same results),
    control whether to enable or disable the reuse behavior for a
    particular tool or step (to accommodate situations where that
    assumption is incorrect).  A reused step is not executed but
    instead returns the same output as the original execution.

    If `WorkReuse` is not specified, correct tools should assume it
    is enabled by default.
    """

    def __init__(
        self,
        enableReuse: Any,
        extension_fields: Optional[dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # Fall back to fresh defaults when the caller supplies nothing.
        self.extension_fields = extension_fields if extension_fields else CommentedMap()
        self.loadingOptions = loadingOptions if loadingOptions else LoadingOptions()
        self.class_ = "WorkReuse"
        self.enableReuse = enableReuse

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, WorkReuse):
            return False
        return bool(
            self.class_ == other.class_ and self.enableReuse == other.enableReuse
        )

    def __hash__(self) -> int:
        return hash((self.class_, self.enableReuse))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None
    ) -> "WorkReuse":
        """Construct a ``WorkReuse`` from a parsed YAML/JSON mapping.

        Raises ValidationException aggregating every field-level problem.
        """
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Keep ruamel line/column bookkeeping so errors point at the source.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        errors: list[ValidationException] = []

        def _note_error(field: str, exc: ValidationException) -> None:
            # Turn a load_field failure into the user-facing error tree,
            # mirroring the generated reporting format exactly.
            message, to_print, verb = parse_errors(str(exc))
            if str(exc) == f"missing required field `{field}`":
                errors.append(ValidationException(str(exc), None))
                return
            value = _doc.get(field)
            if message != str(exc):
                kind = convert_typing(extract_type(type(value)))
                errors.append(
                    ValidationException(
                        f"the `{field}` field is not valid because:",
                        SourceLine(_doc, field, str),
                        [
                            ValidationException(
                                f"Value is a {kind}, "
                                f"but valid {to_print} for this field "
                                f"{verb} {message}",
                                detailed_message=f"Value `{value}` is a {kind}, "
                                f"but valid {to_print} for this field "
                                f"{verb} {message}",
                            )
                        ],
                    )
                )
            else:
                errors.append(
                    ValidationException(
                        f"the `{field}` field is not valid because:",
                        SourceLine(_doc, field, str),
                        [exc],
                        detailed_message=f"the `{field}` field with value `{value}` "
                        "is not valid because:",
                    )
                )

        try:
            if _doc.get("class") is None:
                raise ValidationException("missing required field `class`", None, [])
            class_ = load_field(
                _doc.get("class"),
                uri_WorkReuse_classLoader_False_True_None_None,
                baseuri,
                loadingOptions,
                lc=_doc.get("class"),
            )
            # The declared class must match this Python class (or its vocab alias).
            if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)):
                raise ValidationException(f"tried `{cls.__name__}` but")
        except ValidationException:
            raise
        try:
            if _doc.get("enableReuse") is None:
                raise ValidationException("missing required field `enableReuse`", None, [])
            enableReuse = load_field(
                _doc.get("enableReuse"),
                union_of_booltype_or_ExpressionLoader,
                baseuri,
                loadingOptions,
                lc=_doc.get("enableReuse"),
            )
        except ValidationException as exc:
            _note_error("enableReuse", exc)

        extension_fields: dict[str, Any] = {}
        for key in _doc.keys():
            if key in cls.attrs:
                continue
            if not key:
                errors.append(ValidationException("mapping with implicit null key"))
            elif ":" in key:
                # Namespaced keys become extension fields with expanded URIs.
                expanded = expand_url(
                    key, "", loadingOptions, scoped_id=False, vocab_term=False
                )
                extension_fields[expanded] = _doc[key]
            else:
                errors.append(
                    ValidationException(
                        "invalid field `{}`, expected one of: `class`, `enableReuse`".format(
                            key
                        ),
                        SourceLine(_doc, key, str),
                    )
                )

        if errors:
            raise ValidationException("", None, errors, "*")
        return cls(
            enableReuse=enableReuse,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )

    def save(
        self, top: bool = False, base_url: str = "", relative_uris: bool = True
    ) -> dict[str, Any]:
        """Serialize this object back to a plain dictionary."""
        r: dict[str, Any] = {}
        for ef, value in self.extension_fields.items():
            key = prefix_url(ef, self.loadingOptions.vocab) if relative_uris else ef
            r[key] = value
        if self.class_ is not None:
            uri = self.loadingOptions.vocab[self.class_]
            if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]):
                uri = f"{p}:{self.class_}"
            else:
                uri = self.class_
            r["class"] = save_relative_uri(uri, base_url, False, None, relative_uris)
        if self.enableReuse is not None:
            r["enableReuse"] = save(
                self.enableReuse,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
            )
        # Only the document root carries the namespace/schema preambles.
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["class", "enableReuse"])
save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.enableReuse is not None: + r["enableReuse"] = save( + self.enableReuse, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "enableReuse"]) + + +class NetworkAccess(ProcessRequirement): + """ + Indicate whether a process requires outgoing IPv4/IPv6 network + access. Choice of IPv4 or IPv6 is implementation and site + specific, correct tools must support both. + + If `networkAccess` is false or not specified, tools must not + assume network access, except for localhost (the loopback device). + + If `networkAccess` is true, the tool must be able to make outgoing + connections to network resources. Resources may be on a private + subnet or the public Internet. However, implementations and sites + may apply their own security policies to restrict what is + accessible by the tool. + + Enabling network access does not imply a publicly routable IP + address or the ability to accept inbound connections. 
class NetworkAccess(ProcessRequirement):
    """
    Indicate whether a process requires outgoing IPv4/IPv6 network
    access.  Choice of IPv4 or IPv6 is implementation and site
    specific, correct tools must support both.

    If `networkAccess` is false or not specified, tools must not
    assume network access, except for localhost (the loopback device).

    If `networkAccess` is true, the tool must be able to make outgoing
    connections to network resources.  Resources may be on a private
    subnet or the public Internet; implementations and sites may apply
    their own security policies to restrict what is accessible by the
    tool.  Enabling network access does not imply a publicly routable
    IP address or the ability to accept inbound connections.
    """

    def __init__(
        self,
        networkAccess: Any,
        extension_fields: Optional[dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # Fall back to fresh defaults when the caller supplies nothing.
        self.extension_fields = extension_fields if extension_fields else CommentedMap()
        self.loadingOptions = loadingOptions if loadingOptions else LoadingOptions()
        self.class_ = "NetworkAccess"
        self.networkAccess = networkAccess

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, NetworkAccess):
            return False
        return bool(
            self.class_ == other.class_
            and self.networkAccess == other.networkAccess
        )

    def __hash__(self) -> int:
        return hash((self.class_, self.networkAccess))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None
    ) -> "NetworkAccess":
        """Construct a ``NetworkAccess`` from a parsed YAML/JSON mapping.

        Raises ValidationException aggregating every field-level problem.
        """
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Keep ruamel line/column bookkeeping so errors point at the source.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        errors: list[ValidationException] = []

        def _note_error(field: str, exc: ValidationException) -> None:
            # Turn a load_field failure into the user-facing error tree,
            # mirroring the generated reporting format exactly.
            message, to_print, verb = parse_errors(str(exc))
            if str(exc) == f"missing required field `{field}`":
                errors.append(ValidationException(str(exc), None))
                return
            value = _doc.get(field)
            if message != str(exc):
                kind = convert_typing(extract_type(type(value)))
                errors.append(
                    ValidationException(
                        f"the `{field}` field is not valid because:",
                        SourceLine(_doc, field, str),
                        [
                            ValidationException(
                                f"Value is a {kind}, "
                                f"but valid {to_print} for this field "
                                f"{verb} {message}",
                                detailed_message=f"Value `{value}` is a {kind}, "
                                f"but valid {to_print} for this field "
                                f"{verb} {message}",
                            )
                        ],
                    )
                )
            else:
                errors.append(
                    ValidationException(
                        f"the `{field}` field is not valid because:",
                        SourceLine(_doc, field, str),
                        [exc],
                        detailed_message=f"the `{field}` field with value `{value}` "
                        "is not valid because:",
                    )
                )

        try:
            if _doc.get("class") is None:
                raise ValidationException("missing required field `class`", None, [])
            class_ = load_field(
                _doc.get("class"),
                uri_NetworkAccess_classLoader_False_True_None_None,
                baseuri,
                loadingOptions,
                lc=_doc.get("class"),
            )
            # The declared class must match this Python class (or its vocab alias).
            if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)):
                raise ValidationException(f"tried `{cls.__name__}` but")
        except ValidationException:
            raise
        try:
            if _doc.get("networkAccess") is None:
                raise ValidationException("missing required field `networkAccess`", None, [])
            networkAccess = load_field(
                _doc.get("networkAccess"),
                union_of_booltype_or_ExpressionLoader,
                baseuri,
                loadingOptions,
                lc=_doc.get("networkAccess"),
            )
        except ValidationException as exc:
            _note_error("networkAccess", exc)

        extension_fields: dict[str, Any] = {}
        for key in _doc.keys():
            if key in cls.attrs:
                continue
            if not key:
                errors.append(ValidationException("mapping with implicit null key"))
            elif ":" in key:
                # Namespaced keys become extension fields with expanded URIs.
                expanded = expand_url(
                    key, "", loadingOptions, scoped_id=False, vocab_term=False
                )
                extension_fields[expanded] = _doc[key]
            else:
                errors.append(
                    ValidationException(
                        "invalid field `{}`, expected one of: `class`, `networkAccess`".format(
                            key
                        ),
                        SourceLine(_doc, key, str),
                    )
                )

        if errors:
            raise ValidationException("", None, errors, "*")
        return cls(
            networkAccess=networkAccess,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )

    def save(
        self, top: bool = False, base_url: str = "", relative_uris: bool = True
    ) -> dict[str, Any]:
        """Serialize this object back to a plain dictionary."""
        r: dict[str, Any] = {}
        for ef, value in self.extension_fields.items():
            key = prefix_url(ef, self.loadingOptions.vocab) if relative_uris else ef
            r[key] = value
        if self.class_ is not None:
            uri = self.loadingOptions.vocab[self.class_]
            if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]):
                uri = f"{p}:{self.class_}"
            else:
                uri = self.class_
            r["class"] = save_relative_uri(uri, base_url, False, None, relative_uris)
        if self.networkAccess is not None:
            r["networkAccess"] = save(
                self.networkAccess,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
            )
        # Only the document root carries the namespace/schema preambles.
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["class", "networkAccess"])
f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.networkAccess is not None: + r["networkAccess"] = save( + self.networkAccess, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "networkAccess"]) + + +class InplaceUpdateRequirement(ProcessRequirement): + """ + + If `inplaceUpdate` is true, then an implementation supporting this + feature may permit tools to directly update files with `writable: + true` in InitialWorkDirRequirement. That is, as an optimization, + files may be destructively modified in place as opposed to copied + and updated. + + An implementation must ensure that only one workflow step may + access a writable file at a time. It is an error if a file which + is writable by one workflow step file is accessed (for reading or + writing) by any other workflow step running independently. + However, a file which has been updated in a previous completed + step may be used as input to multiple steps, provided it is + read-only in every step. + + Workflow steps which modify a file must produce the modified file + as output. Downstream steps which further process the file must + use the output of previous steps, and not refer to a common input + (this is necessary for both ordering and correctness). + + Workflow authors should provide this in the `hints` section. The + intent of this feature is that workflows produce the same results + whether or not InplaceUpdateRequirement is supported by the + implementation, and this feature is primarily available as an + optimization for particular environments. 
class InplaceUpdateRequirement(ProcessRequirement):
    """
    If `inplaceUpdate` is true, then an implementation supporting this
    feature may permit tools to directly update files with `writable:
    true` in InitialWorkDirRequirement.  That is, as an optimization,
    files may be destructively modified in place as opposed to copied
    and updated.

    An implementation must ensure that only one workflow step may
    access a writable file at a time.  A file which has been updated in
    a previous completed step may be used as input to multiple steps,
    provided it is read-only in every step.  Workflow steps which
    modify a file must produce the modified file as output; downstream
    steps must use that output rather than a common input.

    Workflow authors should provide this in the `hints` section.
    Workflows that destructively modify inputs may not be repeatable or
    reproducible; enabling this feature implies that WorkReuse should
    not be enabled.
    """

    def __init__(
        self,
        inplaceUpdate: Any,
        extension_fields: Optional[dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # Fall back to fresh defaults when the caller supplies nothing.
        self.extension_fields = extension_fields if extension_fields else CommentedMap()
        self.loadingOptions = loadingOptions if loadingOptions else LoadingOptions()
        self.class_ = "InplaceUpdateRequirement"
        self.inplaceUpdate = inplaceUpdate

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, InplaceUpdateRequirement):
            return False
        return bool(
            self.class_ == other.class_
            and self.inplaceUpdate == other.inplaceUpdate
        )

    def __hash__(self) -> int:
        return hash((self.class_, self.inplaceUpdate))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None
    ) -> "InplaceUpdateRequirement":
        """Construct an ``InplaceUpdateRequirement`` from a parsed mapping.

        Raises ValidationException aggregating every field-level problem.
        """
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Keep ruamel line/column bookkeeping so errors point at the source.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        errors: list[ValidationException] = []

        def _note_error(field: str, exc: ValidationException) -> None:
            # Turn a load_field failure into the user-facing error tree,
            # mirroring the generated reporting format exactly.
            message, to_print, verb = parse_errors(str(exc))
            if str(exc) == f"missing required field `{field}`":
                errors.append(ValidationException(str(exc), None))
                return
            value = _doc.get(field)
            if message != str(exc):
                kind = convert_typing(extract_type(type(value)))
                errors.append(
                    ValidationException(
                        f"the `{field}` field is not valid because:",
                        SourceLine(_doc, field, str),
                        [
                            ValidationException(
                                f"Value is a {kind}, "
                                f"but valid {to_print} for this field "
                                f"{verb} {message}",
                                detailed_message=f"Value `{value}` is a {kind}, "
                                f"but valid {to_print} for this field "
                                f"{verb} {message}",
                            )
                        ],
                    )
                )
            else:
                errors.append(
                    ValidationException(
                        f"the `{field}` field is not valid because:",
                        SourceLine(_doc, field, str),
                        [exc],
                        detailed_message=f"the `{field}` field with value `{value}` "
                        "is not valid because:",
                    )
                )

        try:
            if _doc.get("class") is None:
                raise ValidationException("missing required field `class`", None, [])
            class_ = load_field(
                _doc.get("class"),
                uri_InplaceUpdateRequirement_classLoader_False_True_None_None,
                baseuri,
                loadingOptions,
                lc=_doc.get("class"),
            )
            # The declared class must match this Python class (or its vocab alias).
            if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)):
                raise ValidationException(f"tried `{cls.__name__}` but")
        except ValidationException:
            raise
        try:
            if _doc.get("inplaceUpdate") is None:
                raise ValidationException("missing required field `inplaceUpdate`", None, [])
            inplaceUpdate = load_field(
                _doc.get("inplaceUpdate"),
                booltype,
                baseuri,
                loadingOptions,
                lc=_doc.get("inplaceUpdate"),
            )
        except ValidationException as exc:
            _note_error("inplaceUpdate", exc)

        extension_fields: dict[str, Any] = {}
        for key in _doc.keys():
            if key in cls.attrs:
                continue
            if not key:
                errors.append(ValidationException("mapping with implicit null key"))
            elif ":" in key:
                # Namespaced keys become extension fields with expanded URIs.
                expanded = expand_url(
                    key, "", loadingOptions, scoped_id=False, vocab_term=False
                )
                extension_fields[expanded] = _doc[key]
            else:
                errors.append(
                    ValidationException(
                        "invalid field `{}`, expected one of: `class`, `inplaceUpdate`".format(
                            key
                        ),
                        SourceLine(_doc, key, str),
                    )
                )

        if errors:
            raise ValidationException("", None, errors, "*")
        return cls(
            inplaceUpdate=inplaceUpdate,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )

    def save(
        self, top: bool = False, base_url: str = "", relative_uris: bool = True
    ) -> dict[str, Any]:
        """Serialize this object back to a plain dictionary."""
        r: dict[str, Any] = {}
        for ef, value in self.extension_fields.items():
            key = prefix_url(ef, self.loadingOptions.vocab) if relative_uris else ef
            r[key] = value
        if self.class_ is not None:
            uri = self.loadingOptions.vocab[self.class_]
            if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]):
                uri = f"{p}:{self.class_}"
            else:
                uri = self.class_
            r["class"] = save_relative_uri(uri, base_url, False, None, relative_uris)
        if self.inplaceUpdate is not None:
            r["inplaceUpdate"] = save(
                self.inplaceUpdate,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
            )
        # Only the document root carries the namespace/schema preambles.
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["class", "inplaceUpdate"])
self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.inplaceUpdate is not None: + r["inplaceUpdate"] = save( + self.inplaceUpdate, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "inplaceUpdate"]) + + +class ToolTimeLimit(ProcessRequirement): + """ + Set an upper limit on the execution time of a CommandLineTool. + A CommandLineTool whose execution duration exceeds the time + limit may be preemptively terminated and considered failed. + May also be used by batch systems to make scheduling decisions. + The execution duration excludes external operations, such as + staging of files, pulling a docker image etc, and only counts + wall-time for the execution of the command line itself. 
class ToolTimeLimit(ProcessRequirement):
    """
    Set an upper limit on the execution time of a CommandLineTool.
    A CommandLineTool whose execution duration exceeds the time
    limit may be preemptively terminated and considered failed.
    May also be used by batch systems to make scheduling decisions.
    The execution duration excludes external operations, such as
    staging of files, pulling a docker image etc, and only counts
    wall-time for the execution of the command line itself.
    """

    def __init__(
        self,
        timelimit: Any,
        extension_fields: Optional[dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # Fall back to fresh defaults when the caller supplies nothing.
        self.extension_fields = extension_fields if extension_fields else CommentedMap()
        self.loadingOptions = loadingOptions if loadingOptions else LoadingOptions()
        self.class_ = "ToolTimeLimit"
        self.timelimit = timelimit

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, ToolTimeLimit):
            return False
        return bool(
            self.class_ == other.class_ and self.timelimit == other.timelimit
        )

    def __hash__(self) -> int:
        return hash((self.class_, self.timelimit))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None
    ) -> "ToolTimeLimit":
        """Construct a ``ToolTimeLimit`` from a parsed YAML/JSON mapping.

        Raises ValidationException aggregating every field-level problem.
        """
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Keep ruamel line/column bookkeeping so errors point at the source.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        errors: list[ValidationException] = []

        def _note_error(field: str, exc: ValidationException) -> None:
            # Turn a load_field failure into the user-facing error tree,
            # mirroring the generated reporting format exactly.
            message, to_print, verb = parse_errors(str(exc))
            if str(exc) == f"missing required field `{field}`":
                errors.append(ValidationException(str(exc), None))
                return
            value = _doc.get(field)
            if message != str(exc):
                kind = convert_typing(extract_type(type(value)))
                errors.append(
                    ValidationException(
                        f"the `{field}` field is not valid because:",
                        SourceLine(_doc, field, str),
                        [
                            ValidationException(
                                f"Value is a {kind}, "
                                f"but valid {to_print} for this field "
                                f"{verb} {message}",
                                detailed_message=f"Value `{value}` is a {kind}, "
                                f"but valid {to_print} for this field "
                                f"{verb} {message}",
                            )
                        ],
                    )
                )
            else:
                errors.append(
                    ValidationException(
                        f"the `{field}` field is not valid because:",
                        SourceLine(_doc, field, str),
                        [exc],
                        detailed_message=f"the `{field}` field with value `{value}` "
                        "is not valid because:",
                    )
                )

        try:
            if _doc.get("class") is None:
                raise ValidationException("missing required field `class`", None, [])
            class_ = load_field(
                _doc.get("class"),
                uri_ToolTimeLimit_classLoader_False_True_None_None,
                baseuri,
                loadingOptions,
                lc=_doc.get("class"),
            )
            # The declared class must match this Python class (or its vocab alias).
            if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)):
                raise ValidationException(f"tried `{cls.__name__}` but")
        except ValidationException:
            raise
        try:
            if _doc.get("timelimit") is None:
                raise ValidationException("missing required field `timelimit`", None, [])
            timelimit = load_field(
                _doc.get("timelimit"),
                union_of_inttype_or_ExpressionLoader,
                baseuri,
                loadingOptions,
                lc=_doc.get("timelimit"),
            )
        except ValidationException as exc:
            _note_error("timelimit", exc)

        extension_fields: dict[str, Any] = {}
        for key in _doc.keys():
            if key in cls.attrs:
                continue
            if not key:
                errors.append(ValidationException("mapping with implicit null key"))
            elif ":" in key:
                # Namespaced keys become extension fields with expanded URIs.
                expanded = expand_url(
                    key, "", loadingOptions, scoped_id=False, vocab_term=False
                )
                extension_fields[expanded] = _doc[key]
            else:
                errors.append(
                    ValidationException(
                        "invalid field `{}`, expected one of: `class`, `timelimit`".format(
                            key
                        ),
                        SourceLine(_doc, key, str),
                    )
                )

        if errors:
            raise ValidationException("", None, errors, "*")
        return cls(
            timelimit=timelimit,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )

    def save(
        self, top: bool = False, base_url: str = "", relative_uris: bool = True
    ) -> dict[str, Any]:
        """Serialize this object back to a plain dictionary."""
        r: dict[str, Any] = {}
        for ef, value in self.extension_fields.items():
            key = prefix_url(ef, self.loadingOptions.vocab) if relative_uris else ef
            r[key] = value
        if self.class_ is not None:
            uri = self.loadingOptions.vocab[self.class_]
            if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]):
                uri = f"{p}:{self.class_}"
            else:
                uri = self.class_
            r["class"] = save_relative_uri(uri, base_url, False, None, relative_uris)
        if self.timelimit is not None:
            r["timelimit"] = save(
                self.timelimit,
                top=False,
                base_url=base_url,
                relative_uris=relative_uris,
            )
        # Only the document root carries the namespace/schema preambles.
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["class", "timelimit"])
base_url, False, None, relative_uris) + r["class"] = u + if self.timelimit is not None: + r["timelimit"] = save( + self.timelimit, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "timelimit"]) + + +class ExpressionToolOutputParameter(OutputParameter): + id: str + + def __init__( + self, + type_: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.format = format + self.type_ = type_ + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ExpressionToolOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.type_ == other.type_ + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.type_, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: 
Optional[str] = None + ) -> "ExpressionToolOutputParameter": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + id = None + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = 
convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + streamable 
= None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("streamable") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `streamable`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("streamable") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + detailed_message=f"the `streamable` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a 
{val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except 
ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + type_=type_, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if 
relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] + ) + + +class WorkflowInputParameter(InputParameter): + id: str + + def __init__( + self, + type_: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + default: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> 
None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.default = default + self.type_ = type_ + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowInputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default + and self.type_ == other.type_ + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.loadContents, + self.loadListing, + self.default, + self.type_, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "WorkflowInputParameter": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + id = None + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + 
_errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + 
SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + streamable = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("streamable") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `streamable`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("streamable") + if error_message != str(e): + val_type = 
convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + detailed_message=f"the `streamable` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + 
uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + loadContents = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("loadContents") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadContents`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadContents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a 
{val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + detailed_message=f"the `loadContents` field with value `{val}` " + "is not valid because:", + ) + ) + loadListing = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + lc=_doc.get("loadListing") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadListing`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadListing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + detailed_message=f"the `loadListing` field with value `{val}` " + "is not valid because:", + ) + ) + default = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_CWLObjectTypeLoader, + baseuri, + loadingOptions, + lc=_doc.get("default") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `default`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) 
+ else: + val = _doc.get("default") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + detailed_message=f"the `default` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the 
`type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + inputBinding = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_InputBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputBinding") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputBinding`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("inputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + detailed_message=f"the `inputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if 
_errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + loadContents=loadContents, + loadListing=loadListing, + default=default, + type_=type_, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.default is not None: + r["default"] = save( + self.default, top=False, 
base_url=self.id, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "loadContents", + "loadListing", + "default", + "type", + "inputBinding", + ] + ) + + +class ExpressionTool(Process): + """ + An ExpressionTool is a type of Process object that can be run by itself + or as a Workflow step. It executes a pure Javascript expression that has + access to the same input parameters as a workflow. It is meant to be used + sparingly as a way to isolate complex Javascript expressions that need to + operate on input data and produce some result; perhaps just a + rearrangement of the inputs. No Docker software container is required + or allowed. 
+ + """ + + id: str + + def __init__( + self, + inputs: Any, + outputs: Any, + expression: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + intent: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_ = "ExpressionTool" + self.expression = expression + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ExpressionTool): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + and self.expression == other.expression + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + self.expression, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "ExpressionTool": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + 
_errors__ = [] + id = None + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + uri_ExpressionTool_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as 
e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` 
field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("inputs") is None: + raise ValidationException("missing required field `inputs`", None, []) + + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_WorkflowInputParameterLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputs") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputs`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("inputs") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + detailed_message=f"the `inputs` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("outputs") is None: + raise ValidationException("missing required field `outputs`", None, []) + + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_ExpressionToolOutputParameterLoader, + baseuri, + loadingOptions, + lc=_doc.get("outputs") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outputs`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outputs") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputs` field is not 
valid because:", + SourceLine(_doc, "outputs", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + detailed_message=f"the `outputs` field with value `{val}` " + "is not valid because:", + ) + ) + requirements = None + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + lc=_doc.get("requirements") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `requirements`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("requirements") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, 
" + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + detailed_message=f"the `requirements` field with value `{val}` " + "is not valid because:", + ) + ) + hints = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + lc=_doc.get("hints") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `hints`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("hints") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + detailed_message=f"the `hints` field with 
value `{val}` " + "is not valid because:", + ) + ) + cwlVersion = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("cwlVersion") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `cwlVersion`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("cwlVersion") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + detailed_message=f"the `cwlVersion` field with value `{val}` " + "is not valid because:", + ) + ) + intent = None + if "intent" in _doc: + try: + intent = load_field( + _doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("intent") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `intent`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("intent") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", str), + [ValidationException(f"Value is a 
{val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", str), + [e], + detailed_message=f"the `intent` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("expression") is None: + raise ValidationException("missing required field `expression`", None, []) + + expression = load_field( + _doc.get("expression"), + ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("expression") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `expression`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("expression") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `expression` field is not valid because:", + SourceLine(_doc, "expression", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `expression` field is not valid because:", + SourceLine(_doc, "expression", str), + [e], + detailed_message=f"the `expression` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, 
vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `expression`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + expression=expression, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, self.id, False, None, relative_uris) + r["class"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.inputs is not None: + r["inputs"] = save( + self.inputs, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.outputs is not None: + r["outputs"] = save( + self.outputs, top=False, base_url=self.id, relative_uris=relative_uris + ) 
+ if self.requirements is not None: + r["requirements"] = save( + self.requirements, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.hints is not None: + r["hints"] = save( + self.hints, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.cwlVersion is not None: + u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) + r["cwlVersion"] = u + if self.intent is not None: + u = save_relative_uri(self.intent, self.id, True, None, relative_uris) + r["intent"] = u + if self.expression is not None: + r["expression"] = save( + self.expression, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + "expression", + ] + ) + + +class Sink(Saveable): + pass + + +class InputSink(Sink): + pass + + +class OutputSink(Sink): + pass + + +class WorkflowOutputParameter(OutputParameter, OutputSink): + """ + Describe an output parameter of a workflow. The parameter must be + connected to one or more parameters defined in the workflow that + will provide the value of the output parameter. It is legal to + connect a WorkflowInputParameter to a WorkflowOutputParameter. + + See [WorkflowStepInput](#WorkflowStepInput) for discussion of + `linkMerge` and `pickValue`. 
+ + """ + + id: str + + def __init__( + self, + type_: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + linkMerge: Optional[Any] = None, + pickValue: Optional[Any] = None, + outputSource: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.format = format + self.linkMerge = linkMerge + self.pickValue = pickValue + self.outputSource = outputSource + self.type_ = type_ + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.linkMerge == other.linkMerge + and self.pickValue == other.pickValue + and self.outputSource == other.outputSource + and self.type_ == other.type_ + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.linkMerge, + self.pickValue, + self.outputSource, + self.type_, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "WorkflowOutputParameter": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = 
doc.lc.filename + _errors__ = [] + id = None + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + 
[ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + streamable = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + 
lc=_doc.get("streamable") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `streamable`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("streamable") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + detailed_message=f"the `streamable` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` 
field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + linkMerge = None + if "linkMerge" in _doc: + try: + linkMerge = load_field( + _doc.get("linkMerge"), + union_of_None_type_or_LinkMergeMethodLoader, + baseuri, + loadingOptions, + lc=_doc.get("linkMerge") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `linkMerge`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("linkMerge") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `linkMerge` field is not valid 
because:", + SourceLine(_doc, "linkMerge", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [e], + detailed_message=f"the `linkMerge` field with value `{val}` " + "is not valid because:", + ) + ) + pickValue = None + if "pickValue" in _doc: + try: + pickValue = load_field( + _doc.get("pickValue"), + union_of_None_type_or_PickValueMethodLoader, + baseuri, + loadingOptions, + lc=_doc.get("pickValue") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `pickValue`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("pickValue") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `pickValue` field is not valid because:", + SourceLine(_doc, "pickValue", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `pickValue` field is not valid because:", + SourceLine(_doc, "pickValue", str), + [e], + detailed_message=f"the `pickValue` field with value `{val}` " + "is not valid because:", + ) + ) + outputSource = None + if "outputSource" in _doc: + try: + outputSource = load_field( + _doc.get("outputSource"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None, + baseuri, + loadingOptions, + 
lc=_doc.get("outputSource") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outputSource`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outputSource") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputSource` field is not valid because:", + SourceLine(_doc, "outputSource", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outputSource` field is not valid because:", + SourceLine(_doc, "outputSource", str), + [e], + detailed_message=f"the `outputSource` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a 
{val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `linkMerge`, `pickValue`, `outputSource`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + linkMerge=linkMerge, + pickValue=pickValue, + outputSource=outputSource, + type_=type_, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.label is not None: + r["label"] = 
save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.linkMerge is not None: + r["linkMerge"] = save( + self.linkMerge, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.pickValue is not None: + r["pickValue"] = save( + self.pickValue, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.outputSource is not None: + u = save_relative_uri(self.outputSource, self.id, False, 1, relative_uris) + r["outputSource"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "linkMerge", + "pickValue", + "outputSource", + "type", + ] + ) + + +class WorkflowStepInput(Identified, InputSink, LoadContents, Labeled): + """ + The input of a workflow step connects an upstream parameter (from the + workflow inputs, or the outputs of other workflows steps) with the input + parameters of the process specified by the `run` field. 
Only input parameters + declared by the target process will be passed through at runtime to the process + though additional parameters may be specified (for use within `valueFrom` + expressions for instance) - unconnected or unused parameters do not represent an + error condition. + + # Input object + + A WorkflowStepInput object must contain an `id` field in the form + `#fieldname` or `#prefix/fieldname`. When the `id` field contains a slash + `/` the field name consists of the characters following the final slash + (the prefix portion may contain one or more slashes to indicate scope). + This defines a field of the workflow step input object with the value of + the `source` parameter(s). + + # Merging multiple inbound data links + + To merge multiple inbound data links, + [MultipleInputFeatureRequirement](#MultipleInputFeatureRequirement) must be specified + in the workflow or workflow step requirements. + + If the sink parameter is an array, or named in a [workflow + scatter](#ScatterWorkflowStep) operation, there may be multiple inbound + data links listed in the `source` field. The values from the + input links are merged depending on the method specified in the + `linkMerge` field. If both `linkMerge` and `pickValue` are null + or not specified, and there is more than one element in the + `source` array, the default method is "merge_nested". + + If both `linkMerge` and `pickValue` are null or not specified, and + there is only a single element in the `source`, then the input + parameter takes the scalar value from the single input link (it is + *not* wrapped in a single-list). + + * **merge_nested** + + The input must be an array consisting of exactly one entry for each + input link. If "merge_nested" is specified with a single link, the value + from the link must be wrapped in a single-item list. + + * **merge_flattened** + + 1. 
The source and sink parameters must be compatible types, or the source + type must be compatible with single element from the "items" type of + the destination array parameter. + 2. Source parameters which are arrays are concatenated. + Source parameters which are single element types are appended as + single elements. + + # Picking non-null values among inbound data links + + If present, `pickValue` specifies how to pick non-null values among inbound data links. + + `pickValue` is evaluated + 1. Once all source values from upstream step or parameters are available. + 2. After `linkMerge`. + 3. Before `scatter` or `valueFrom`. + + This is specifically intended to be useful in combination with + [conditional execution](#AbstractWorkflowStep), where several upstream + steps may be connected to a single input (`source` is a list), and + skipped steps produce null values. + + Static type checkers should check for type consistency after inferring what the type + will be after `pickValue` is applied, just as they do currently for `linkMerge`. + + * **first_non_null** + + For the first level of a list input, pick the first non-null element. The result is a scalar. + It is an error if there is no non-null element. Examples: + * `[null, x, null, y] -> x` + * `[null, [null], null, y] -> [null]` + * `[null, null, null] -> Runtime Error` + + *Intended use case*: If-else pattern where the + value comes either from a conditional step or from a default or + fallback value. The conditional step(s) should be placed first in + the list. + + * **the_only_non_null** + + For the first level of a list input, pick the single non-null element. The result is a scalar. + It is an error if there is more than one non-null element. 
Examples: + + * `[null, x, null] -> x` + * `[null, x, null, y] -> Runtime Error` + * `[null, [null], null] -> [null]` + * `[null, null, null] -> Runtime Error` + + *Intended use case*: Switch type patterns where developer considers + more than one active code path as a workflow error + (possibly indicating an error in writing `when` condition expressions). + + * **all_non_null** + + For the first level of a list input, pick all non-null values. + The result is a list, which may be empty. Examples: + + * `[null, x, null] -> [x]` + * `[x, null, y] -> [x, y]` + * `[null, [x], [null]] -> [[x], [null]]` + * `[null, null, null] -> []` + + *Intended use case*: It is valid to have more than one source, but + sources are conditional, so null sources (from skipped steps) + should be filtered out. + + """ + + id: str + + def __init__( + self, + id: Optional[Any] = None, + linkMerge: Optional[Any] = None, + pickValue: Optional[Any] = None, + source: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + label: Optional[Any] = None, + default: Optional[Any] = None, + valueFrom: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.linkMerge = linkMerge + self.pickValue = pickValue + self.source = source + self.loadContents = loadContents + self.loadListing = loadListing + self.label = label + self.default = default + self.valueFrom = valueFrom + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowStepInput): + return bool( + self.id == other.id + and self.linkMerge == other.linkMerge + and self.pickValue == other.pickValue + and self.source 
== other.source + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.label == other.label + and self.default == other.default + and self.valueFrom == other.valueFrom + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.linkMerge, + self.pickValue, + self.source, + self.loadContents, + self.loadListing, + self.label, + self.default, + self.valueFrom, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "WorkflowStepInput": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + id = None + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + 
str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + linkMerge = None + if "linkMerge" in _doc: + try: + linkMerge = load_field( + _doc.get("linkMerge"), + union_of_None_type_or_LinkMergeMethodLoader, + baseuri, + loadingOptions, + lc=_doc.get("linkMerge") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `linkMerge`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("linkMerge") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [e], + detailed_message=f"the `linkMerge` field with value `{val}` " + "is not valid because:", + ) + ) + pickValue = None + if "pickValue" in _doc: + try: + pickValue = load_field( + _doc.get("pickValue"), + union_of_None_type_or_PickValueMethodLoader, + baseuri, + loadingOptions, + lc=_doc.get("pickValue") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `pickValue`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("pickValue") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `pickValue` field is not valid because:", + SourceLine(_doc, "pickValue", str), + [ValidationException(f"Value is a 
{val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `pickValue` field is not valid because:", + SourceLine(_doc, "pickValue", str), + [e], + detailed_message=f"the `pickValue` field with value `{val}` " + "is not valid because:", + ) + ) + source = None + if "source" in _doc: + try: + source = load_field( + _doc.get("source"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2_None, + baseuri, + loadingOptions, + lc=_doc.get("source") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `source`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("source") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `source` field is not valid because:", + SourceLine(_doc, "source", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `source` field is not valid because:", + SourceLine(_doc, "source", str), + [e], + detailed_message=f"the `source` field with value `{val}` " + "is not valid because:", + ) + ) + loadContents = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("loadContents") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) 
== "missing required field `loadContents`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadContents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + detailed_message=f"the `loadContents` field with value `{val}` " + "is not valid because:", + ) + ) + loadListing = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + lc=_doc.get("loadListing") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadListing`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadListing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, 
"loadListing", str), + [e], + detailed_message=f"the `loadListing` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + default = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_CWLObjectTypeLoader, + baseuri, + loadingOptions, + lc=_doc.get("default") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `default`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("default") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid 
{to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + detailed_message=f"the `default` field with value `{val}` " + "is not valid because:", + ) + ) + valueFrom = None + if "valueFrom" in _doc: + try: + valueFrom = load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("valueFrom") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `valueFrom`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("valueFrom") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [e], + detailed_message=f"the `valueFrom` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + 
ValidationException( + "invalid field `{}`, expected one of: `id`, `linkMerge`, `pickValue`, `source`, `loadContents`, `loadListing`, `label`, `default`, `valueFrom`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + id=id, + linkMerge=linkMerge, + pickValue=pickValue, + source=source, + loadContents=loadContents, + loadListing=loadListing, + label=label, + default=default, + valueFrom=valueFrom, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.linkMerge is not None: + r["linkMerge"] = save( + self.linkMerge, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.pickValue is not None: + r["pickValue"] = save( + self.pickValue, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.source is not None: + u = save_relative_uri(self.source, self.id, False, 2, relative_uris) + r["source"] = u + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.default is not None: + r["default"] = save( + 
self.default, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.valueFrom is not None: + r["valueFrom"] = save( + self.valueFrom, top=False, base_url=self.id, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "linkMerge", + "pickValue", + "source", + "loadContents", + "loadListing", + "label", + "default", + "valueFrom", + ] + ) + + +class WorkflowStepOutput(Identified): + """ + Associate an output parameter of the underlying process with a workflow + parameter. The workflow parameter (given in the `id` field) may be used + as a `source` to connect with input parameters of other workflow steps, or + with an output parameter of the process. + + A unique identifier for this workflow output parameter. This is + the identifier to use in the `source` field of `WorkflowStepInput` + to connect the output value to downstream parameters. 
+ + """ + + id: str + + def __init__( + self, + id: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowStepOutput): + return bool(self.id == other.id) + return False + + def __hash__(self) -> int: + return hash((self.id)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "WorkflowStepOutput": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + id = None + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + 
[e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`".format(k), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + id=id, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["id"]) + + +class LoopInput(Identified, OutputSink): + id: str + + def __init__( + self, + id: Optional[Any] = None, + linkMerge: Optional[Any] = None, + pickValue: Optional[Any] = None, + outputSource: Optional[Any] = None, + default: Optional[Any] = None, + 
valueFrom: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.linkMerge = linkMerge + self.pickValue = pickValue + self.outputSource = outputSource + self.default = default + self.valueFrom = valueFrom + + def __eq__(self, other: Any) -> bool: + if isinstance(other, LoopInput): + return bool( + self.id == other.id + and self.linkMerge == other.linkMerge + and self.pickValue == other.pickValue + and self.outputSource == other.outputSource + and self.default == other.default + and self.valueFrom == other.valueFrom + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.linkMerge, + self.pickValue, + self.outputSource, + self.default, + self.valueFrom, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "LoopInput": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + id = None + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", 
str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + linkMerge = None + if "linkMerge" in _doc: + try: + linkMerge = load_field( + _doc.get("linkMerge"), + union_of_None_type_or_LinkMergeMethodLoader, + baseuri, + loadingOptions, + lc=_doc.get("linkMerge") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `linkMerge`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("linkMerge") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [e], + detailed_message=f"the `linkMerge` field with value `{val}` " + "is not valid because:", + ) + ) + pickValue = None + if "pickValue" in _doc: + try: + pickValue = load_field( + 
_doc.get("pickValue"), + union_of_None_type_or_PickValueMethodLoader, + baseuri, + loadingOptions, + lc=_doc.get("pickValue") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `pickValue`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("pickValue") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `pickValue` field is not valid because:", + SourceLine(_doc, "pickValue", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `pickValue` field is not valid because:", + SourceLine(_doc, "pickValue", str), + [e], + detailed_message=f"the `pickValue` field with value `{val}` " + "is not valid because:", + ) + ) + outputSource = None + if "outputSource" in _doc: + try: + outputSource = load_field( + _doc.get("outputSource"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None, + baseuri, + loadingOptions, + lc=_doc.get("outputSource") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outputSource`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outputSource") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputSource` field is not valid because:", + SourceLine(_doc, "outputSource", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + 
detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outputSource` field is not valid because:", + SourceLine(_doc, "outputSource", str), + [e], + detailed_message=f"the `outputSource` field with value `{val}` " + "is not valid because:", + ) + ) + default = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + lc=_doc.get("default") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `default`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("default") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + detailed_message=f"the `default` field with value `{val}` " + "is not valid because:", + ) + ) + valueFrom = None + if "valueFrom" in _doc: + try: + valueFrom = load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("valueFrom") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `valueFrom`": + _errors__.append( + 
ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("valueFrom") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [e], + detailed_message=f"the `valueFrom` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `linkMerge`, `pickValue`, `outputSource`, `default`, `valueFrom`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + id=id, + linkMerge=linkMerge, + pickValue=pickValue, + outputSource=outputSource, + default=default, + valueFrom=valueFrom, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + 
else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.linkMerge is not None: + r["linkMerge"] = save( + self.linkMerge, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.pickValue is not None: + r["pickValue"] = save( + self.pickValue, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.outputSource is not None: + u = save_relative_uri(self.outputSource, self.id, False, 1, relative_uris) + r["outputSource"] = u + if self.default is not None: + r["default"] = save( + self.default, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.valueFrom is not None: + r["valueFrom"] = save( + self.valueFrom, top=False, base_url=self.id, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + ["id", "linkMerge", "pickValue", "outputSource", "default", "valueFrom"] + ) + + +class AbstractWorkflowStep(Identified, Labeled, Documented): + """ + A workflow step is an executable element of a workflow. It specifies the + underlying process implementation (such as `CommandLineTool` or another + `Workflow`) in the `run` field and connects the input and output parameters + of the underlying process to workflow parameters. + + # Conditional execution (Optional) + + Conditional execution makes execution of a step conditional on an + expression. A step that is not executed is "skipped". A skipped + step produces `null` for all output parameters. + + The `when` field controls conditional execution. This is an + expression that must be evaluated with `inputs` bound to the step + input object (or individual scatter job), and returns a boolean + value. 
It is an error if this expression returns a value other + than `true` or `false`. + + Conditional execution in CWL is an optional feature and is not required + to be implemented by all consumers of CWL documents. An implementation that + does not support conditional executions must return a fatal error when + attempting to execute a workflow that uses conditional constructs the + implementation does not support. + + # Subworkflows + + To specify a nested workflow as part of a workflow step, + [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) must be + specified in the workflow or workflow step requirements. + + It is a fatal error if a workflow directly or indirectly invokes itself as + a subworkflow (recursive workflows are not allowed). + + """ + + pass + + +class WorkflowStep(AbstractWorkflowStep): + id: str + + def __init__( + self, + in_: Any, + out: Any, + run: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + when: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.label = label + self.doc = doc + self.in_ = in_ + self.out = out + self.requirements = requirements + self.hints = hints + self.run = run + self.when = when + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowStep): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.in_ == other.in_ + and self.out == other.out + and self.requirements == other.requirements + and self.hints == other.hints + and self.run == other.run + 
and self.when == other.when + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.in_, + self.out, + self.requirements, + self.hints, + self.run, + self.when, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "WorkflowStep": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + id = None + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + 
lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + 
SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("in") is None: + raise ValidationException("missing required field `in`", None, []) + + in_ = load_field( + _doc.get("in"), + idmap_in__array_of_WorkflowStepInputLoader, + baseuri, + loadingOptions, + lc=_doc.get("in") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `in`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("in") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `in` field is not valid because:", + SourceLine(_doc, "in", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `in` field is not valid because:", + SourceLine(_doc, "in", str), + [e], + detailed_message=f"the `in` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("out") is None: + raise ValidationException("missing required field `out`", None, []) + + out = load_field( + _doc.get("out"), + uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("out") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `out`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("out") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `out` 
field is not valid because:", + SourceLine(_doc, "out", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `out` field is not valid because:", + SourceLine(_doc, "out", str), + [e], + detailed_message=f"the `out` field with value `{val}` " + "is not valid because:", + ) + ) + requirements = None + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + lc=_doc.get("requirements") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `requirements`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("requirements") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " 
+ f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + detailed_message=f"the `requirements` field with value `{val}` " + "is not valid because:", + ) + ) + hints = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_Any_type, + baseuri, + loadingOptions, + lc=_doc.get("hints") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `hints`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("hints") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + detailed_message=f"the `hints` field with value `{val}` " + "is not valid because:", + ) + ) + + subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) + try: + if _doc.get("run") is None: + raise ValidationException("missing required field `run`", None, []) + + run = load_field( + _doc.get("run"), + uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_False_False_None_None, + subscope_baseuri, + loadingOptions, + lc=_doc.get("run") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = 
parse_errors(str(e)) + + if str(e) == "missing required field `run`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("run") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `run` field is not valid because:", + SourceLine(_doc, "run", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `run` field is not valid because:", + SourceLine(_doc, "run", str), + [e], + detailed_message=f"the `run` field with value `{val}` " + "is not valid because:", + ) + ) + when = None + if "when" in _doc: + try: + when = load_field( + _doc.get("when"), + union_of_None_type_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("when") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `when`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("when") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `when` field is not valid because:", + SourceLine(_doc, "when", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `when` field is not valid because:", + SourceLine(_doc, "when", str), + [e], + detailed_message=f"the `when` field with value `{val}` " + "is not valid because:", + ) 
+ ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `in`, `out`, `requirements`, `hints`, `run`, `when`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + id=id, + label=label, + doc=doc, + in_=in_, + out=out, + requirements=requirements, + hints=hints, + run=run, + when=when, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.in_ is not None: + r["in"] = save( + self.in_, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.out is not None: + u = save_relative_uri(self.out, self.id, True, None, relative_uris) + r["out"] = u + if self.requirements is not None: + r["requirements"] = save( + self.requirements, + top=False, + base_url=self.id, + relative_uris=relative_uris, 
+ ) + if self.hints is not None: + r["hints"] = save( + self.hints, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.run is not None: + u = save_relative_uri(self.run, self.id, False, None, relative_uris) + r["run"] = u + if self.when is not None: + r["when"] = save( + self.when, top=False, base_url=self.id, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + ["id", "label", "doc", "in", "out", "requirements", "hints", "run", "when"] + ) + + +class ScatterWorkflowStep(AbstractWorkflowStep): + """ + To use scatter/gather, + [ScatterFeatureRequirement](#ScatterFeatureRequirement) must be specified + in the workflow or workflow step requirements. + + A "scatter" operation specifies that the associated workflow step or + subworkflow should execute separately over a list of input elements. Each + job making up a scatter operation is independent and may be executed + concurrently. + + The `scatter` field specifies one or more input parameters which will be + scattered. An input parameter may be listed more than once. The declared + type of each input parameter implicitly becomes an array of items of the + input parameter type. If a parameter is listed more than once, it becomes + a nested array. As a result, upstream parameters which are connected to + scattered parameters must be arrays. + + All output parameter types are also implicitly wrapped in arrays. Each job + in the scatter results in an entry in the output array. + + If any scattered parameter runtime value is an empty array, all outputs are + set to empty arrays and no work is done for the step, according to + applicable scattering rules. 
+ + If `scatter` declares more than one input parameter, `scatterMethod` + describes how to decompose the input into a discrete set of jobs. + + * **dotproduct** specifies that each of the input arrays are aligned and one + element taken from each array to construct each job. It is an error + if all input arrays are not the same length. + + * **nested_crossproduct** specifies the Cartesian product of the inputs, + producing a job for every combination of the scattered inputs. The + output must be nested arrays for each level of scattering, in the + order that the input arrays are listed in the `scatter` field. + + * **flat_crossproduct** specifies the Cartesian product of the inputs, + producing a job for every combination of the scattered inputs. The + output arrays must be flattened to a single level, but otherwise listed in the + order that the input arrays are listed in the `scatter` field. + + # Conditional execution (Optional) + + The condition is evaluated after `scatter`, using the input object + of each individual scatter job. This means over a set of scatter + jobs, some may be executed and some may be skipped. When the + results are gathered, skipped steps must be `null` in the output + arrays. 
+ + """ + + id: str + + def __init__( + self, + in_: Any, + out: Any, + run: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + when: Optional[Any] = None, + scatter: Optional[Any] = None, + scatterMethod: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.label = label + self.doc = doc + self.in_ = in_ + self.out = out + self.requirements = requirements + self.hints = hints + self.run = run + self.when = when + self.scatter = scatter + self.scatterMethod = scatterMethod + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ScatterWorkflowStep): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.in_ == other.in_ + and self.out == other.out + and self.requirements == other.requirements + and self.hints == other.hints + and self.run == other.run + and self.when == other.when + and self.scatter == other.scatter + and self.scatterMethod == other.scatterMethod + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.in_, + self.out, + self.requirements, + self.hints, + self.run, + self.when, + self.scatter, + self.scatterMethod, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "ScatterWorkflowStep": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + id = None + if "id" in _doc: + 
try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid 
{to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("in") is None: + raise ValidationException("missing required field `in`", None, []) + + in_ = load_field( + _doc.get("in"), + idmap_in__array_of_WorkflowStepInputLoader, + baseuri, + loadingOptions, + lc=_doc.get("in") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `in`": + _errors__.append( + 
ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("in") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `in` field is not valid because:", + SourceLine(_doc, "in", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `in` field is not valid because:", + SourceLine(_doc, "in", str), + [e], + detailed_message=f"the `in` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("out") is None: + raise ValidationException("missing required field `out`", None, []) + + out = load_field( + _doc.get("out"), + uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("out") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `out`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("out") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `out` field is not valid because:", + SourceLine(_doc, "out", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `out` field is not valid because:", + SourceLine(_doc, "out", str), + [e], + detailed_message=f"the `out` field with value `{val}` " + "is not valid 
because:", + ) + ) + requirements = None + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + lc=_doc.get("requirements") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `requirements`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("requirements") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + detailed_message=f"the `requirements` field with value `{val}` " + "is not valid because:", + ) + ) + hints = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_Any_type, + baseuri, + loadingOptions, + lc=_doc.get("hints") + ) + 
+ except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `hints`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("hints") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + detailed_message=f"the `hints` field with value `{val}` " + "is not valid because:", + ) + ) + + subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) + try: + if _doc.get("run") is None: + raise ValidationException("missing required field `run`", None, []) + + run = load_field( + _doc.get("run"), + uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_False_False_None_None, + subscope_baseuri, + loadingOptions, + lc=_doc.get("run") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `run`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("run") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `run` field is not valid because:", + SourceLine(_doc, "run", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a 
{val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `run` field is not valid because:", + SourceLine(_doc, "run", str), + [e], + detailed_message=f"the `run` field with value `{val}` " + "is not valid because:", + ) + ) + when = None + if "when" in _doc: + try: + when = load_field( + _doc.get("when"), + union_of_None_type_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("when") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `when`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("when") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `when` field is not valid because:", + SourceLine(_doc, "when", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `when` field is not valid because:", + SourceLine(_doc, "when", str), + [e], + detailed_message=f"the `when` field with value `{val}` " + "is not valid because:", + ) + ) + scatter = None + if "scatter" in _doc: + try: + scatter = load_field( + _doc.get("scatter"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0_None, + baseuri, + loadingOptions, + lc=_doc.get("scatter") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `scatter`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("scatter") + if error_message != str(e): + val_type = 
convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `scatter` field is not valid because:", + SourceLine(_doc, "scatter", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `scatter` field is not valid because:", + SourceLine(_doc, "scatter", str), + [e], + detailed_message=f"the `scatter` field with value `{val}` " + "is not valid because:", + ) + ) + scatterMethod = None + if "scatterMethod" in _doc: + try: + scatterMethod = load_field( + _doc.get("scatterMethod"), + uri_union_of_None_type_or_ScatterMethodLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("scatterMethod") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `scatterMethod`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("scatterMethod") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `scatterMethod` field is not valid because:", + SourceLine(_doc, "scatterMethod", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `scatterMethod` field is not valid because:", + SourceLine(_doc, "scatterMethod", str), + [e], + detailed_message=f"the `scatterMethod` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in 
_doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `in`, `out`, `requirements`, `hints`, `run`, `when`, `scatter`, `scatterMethod`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + id=id, + label=label, + doc=doc, + in_=in_, + out=out, + requirements=requirements, + hints=hints, + run=run, + when=when, + scatter=scatter, + scatterMethod=scatterMethod, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.in_ is not None: + r["in"] = save( + self.in_, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.out is not None: + u = save_relative_uri(self.out, self.id, True, None, relative_uris) + r["out"] = u + if self.requirements is not None: + r["requirements"] = save( + self.requirements, + top=False, + base_url=self.id, + 
relative_uris=relative_uris, + ) + if self.hints is not None: + r["hints"] = save( + self.hints, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.run is not None: + u = save_relative_uri(self.run, self.id, False, None, relative_uris) + r["run"] = u + if self.when is not None: + r["when"] = save( + self.when, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.scatter is not None: + u = save_relative_uri(self.scatter, self.id, False, 0, relative_uris) + r["scatter"] = u + if self.scatterMethod is not None: + u = save_relative_uri( + self.scatterMethod, self.id, False, None, relative_uris + ) + r["scatterMethod"] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "in", + "out", + "requirements", + "hints", + "run", + "when", + "scatter", + "scatterMethod", + ] + ) + + +class LoopWorkflowStep(AbstractWorkflowStep): + """ + # Iterative execution (Optional) + + The `loop` field controls iterative execution. It defines the input + parameters of the loop iterations after the first one (inputs of the + first iteration are the step input parameters, as usual). If no + `loop` rule is specified for a given step `in` field, the initial + value is kept constant among all iterations. + + When a `loop` field is present, the `when` field is mandatory. It is + evaluated before each loop iteration and acts as a termination condition: + as soon as the `when` expression evaluates to `false`, the loop terminates + and the step outputs are propagated to the subsequent workflow steps. 
+ + The `outputMethod` field describes how to deal with loop outputs after + termination: + + * **last_iteration** specifies that only the last computed element for + each output parameter should be propagated to the subsequent steps. + This is the default value. + + * **all_iterations** specifies that an array with all output values + computed at the end of each loop iteration should be propagated to + the subsequent steps. Elements in the array must be ordered according + to the loop iterations that produced them. + + Iterative execution in CWL is an optional feature and is not required + to be implemented by all consumers of CWL documents. An implementation that + does not support iterative executions must return a fatal error when + attempting to execute a workflow that uses iterative constructs the + implementation does not support. + + """ + + id: str + + def __init__( + self, + in_: Any, + out: Any, + run: Any, + when: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + loop: Optional[Any] = None, + outputMethod: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.label = label + self.doc = doc + self.in_ = in_ + self.out = out + self.requirements = requirements + self.hints = hints + self.run = run + self.when = when + self.loop = loop + self.outputMethod = outputMethod + + def __eq__(self, other: Any) -> bool: + if isinstance(other, LoopWorkflowStep): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.in_ == 
other.in_ + and self.out == other.out + and self.requirements == other.requirements + and self.hints == other.hints + and self.run == other.run + and self.when == other.when + and self.loop == other.loop + and self.outputMethod == other.outputMethod + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.in_, + self.out, + self.requirements, + self.hints, + self.run, + self.when, + self.loop, + self.outputMethod, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "LoopWorkflowStep": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + id = None + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot 
+ else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} 
{error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("in") is None: + raise ValidationException("missing required field `in`", None, []) + + in_ = load_field( + _doc.get("in"), + idmap_in__array_of_WorkflowStepInputLoader, + baseuri, + loadingOptions, + lc=_doc.get("in") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `in`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("in") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `in` field is not valid because:", + SourceLine(_doc, "in", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `in` field is not valid because:", + SourceLine(_doc, "in", str), + [e], + detailed_message=f"the `in` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("out") is None: + raise ValidationException("missing required field `out`", None, []) + + out = load_field( + _doc.get("out"), + uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("out") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required 
field `out`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("out") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `out` field is not valid because:", + SourceLine(_doc, "out", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `out` field is not valid because:", + SourceLine(_doc, "out", str), + [e], + detailed_message=f"the `out` field with value `{val}` " + "is not valid because:", + ) + ) + requirements = None + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + lc=_doc.get("requirements") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `requirements`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("requirements") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `requirements` 
field is not valid because:", + SourceLine(_doc, "requirements", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + detailed_message=f"the `requirements` field with value `{val}` " + "is not valid because:", + ) + ) + hints = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_Any_type, + baseuri, + loadingOptions, + lc=_doc.get("hints") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `hints`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("hints") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + detailed_message=f"the `hints` field with value `{val}` " + "is not valid because:", + ) + ) + + subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) + try: + if _doc.get("run") is None: + raise ValidationException("missing required field `run`", None, []) + + run = load_field( + _doc.get("run"), + 
uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_False_False_None_None, + subscope_baseuri, + loadingOptions, + lc=_doc.get("run") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `run`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("run") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `run` field is not valid because:", + SourceLine(_doc, "run", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `run` field is not valid because:", + SourceLine(_doc, "run", str), + [e], + detailed_message=f"the `run` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("when") is None: + raise ValidationException("missing required field `when`", None, []) + + when = load_field( + _doc.get("when"), + union_of_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("when") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `when`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("when") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `when` field is not valid because:", + SourceLine(_doc, "when", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a 
{val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `when` field is not valid because:", + SourceLine(_doc, "when", str), + [e], + detailed_message=f"the `when` field with value `{val}` " + "is not valid because:", + ) + ) + loop = None + if "loop" in _doc: + try: + loop = load_field( + _doc.get("loop"), + idmap_loop_union_of_None_type_or_array_of_LoopInputLoader, + baseuri, + loadingOptions, + lc=_doc.get("loop") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loop`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loop") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loop` field is not valid because:", + SourceLine(_doc, "loop", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loop` field is not valid because:", + SourceLine(_doc, "loop", str), + [e], + detailed_message=f"the `loop` field with value `{val}` " + "is not valid because:", + ) + ) + outputMethod = None + if "outputMethod" in _doc: + try: + outputMethod = load_field( + _doc.get("outputMethod"), + uri_union_of_None_type_or_LoopOutputMethodLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("outputMethod") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outputMethod`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = 
_doc.get("outputMethod") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputMethod` field is not valid because:", + SourceLine(_doc, "outputMethod", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outputMethod` field is not valid because:", + SourceLine(_doc, "outputMethod", str), + [e], + detailed_message=f"the `outputMethod` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `in`, `out`, `requirements`, `hints`, `run`, `when`, `loop`, `outputMethod`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + id=id, + label=label, + doc=doc, + in_=in_, + out=out, + requirements=requirements, + hints=hints, + run=run, + when=when, + loop=loop, + outputMethod=outputMethod, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = 
self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.in_ is not None: + r["in"] = save( + self.in_, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.out is not None: + u = save_relative_uri(self.out, self.id, True, None, relative_uris) + r["out"] = u + if self.requirements is not None: + r["requirements"] = save( + self.requirements, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.hints is not None: + r["hints"] = save( + self.hints, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.run is not None: + u = save_relative_uri(self.run, self.id, False, None, relative_uris) + r["run"] = u + if self.when is not None: + r["when"] = save( + self.when, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.loop is not None: + r["loop"] = save( + self.loop, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.outputMethod is not None: + u = save_relative_uri( + self.outputMethod, self.id, False, None, relative_uris + ) + r["outputMethod"] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "in", + "out", + "requirements", + "hints", + "run", + "when", + "loop", + "outputMethod", + ] + ) + + +class Workflow(Process): + """ + A workflow describes a set of **steps** and the **dependencies** between + those steps. 
When a step produces output that will be consumed by a + second step, the first step is a dependency of the second step. + + When there is a dependency, the workflow engine must execute the preceding + step and wait for it to successfully produce output before executing the + dependent step. If two steps are defined in the workflow graph that + are not directly or indirectly dependent, these steps are **independent**, + and may execute in any order or execute concurrently. A workflow is + complete when all steps have been executed. + + Dependencies between parameters are expressed using the `source` + field on [workflow step input parameters](#WorkflowStepInput) and + `outputSource` field on [workflow output + parameters](#WorkflowOutputParameter). + + The `source` field on each workflow step input parameter expresses + the data links that contribute to the value of the step input + parameter (the "sink"). A workflow step can only begin execution + when every data link connected to a step has been fulfilled. + + The `outputSource` field on each workflow step input parameter + expresses the data links that contribute to the value of the + workflow output parameter (the "sink"). Workflow execution cannot + complete successfully until every data link connected to an output + parameter has been fulfilled. + + ## Workflow success and failure + + A completed step must result in one of `success`, `temporaryFailure` or + `permanentFailure` states. An implementation may choose to retry a step + execution which resulted in `temporaryFailure`. An implementation may + choose to either continue running other steps of a workflow, or terminate + immediately upon `permanentFailure`. + + * If any step of a workflow execution results in `permanentFailure`, then + the workflow status is `permanentFailure`. + + * If one or more steps result in `temporaryFailure` and all other steps + complete `success` or are not executed, then the workflow status is + `temporaryFailure`. 
+ + * If all workflow steps are executed and complete with `success`, then the + workflow status is `success`. + + # Extensions + + [ScatterFeatureRequirement](#ScatterFeatureRequirement) and + [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) are + available as standard [extensions](#Extensions_and_Metadata) to core + workflow semantics. + + """ + + id: str + + def __init__( + self, + inputs: Any, + outputs: Any, + steps: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + intent: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_ = "Workflow" + self.steps = steps + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Workflow): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + and self.steps == other.steps + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + 
self.class_, + self.steps, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "Workflow": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + id = None + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + uri_Workflow_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise 
ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", 
+ detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("inputs") is None: + raise ValidationException("missing required field `inputs`", None, []) + + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_WorkflowInputParameterLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputs") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputs`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("inputs") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + detailed_message=f"the `inputs` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("outputs") is None: + raise ValidationException("missing required field `outputs`", None, []) + + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_WorkflowOutputParameterLoader, + baseuri, + loadingOptions, + lc=_doc.get("outputs") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if 
str(e) == "missing required field `outputs`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outputs") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + detailed_message=f"the `outputs` field with value `{val}` " + "is not valid because:", + ) + ) + requirements = None + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + lc=_doc.get("requirements") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `requirements`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("requirements") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + 
_errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + detailed_message=f"the `requirements` field with value `{val}` " + "is not valid because:", + ) + ) + hints = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + lc=_doc.get("hints") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `hints`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("hints") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + 
detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + detailed_message=f"the `hints` field with value `{val}` " + "is not valid because:", + ) + ) + cwlVersion = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("cwlVersion") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `cwlVersion`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("cwlVersion") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + detailed_message=f"the `cwlVersion` field with value `{val}` " + "is not valid because:", + ) + ) + intent = None + if "intent" in _doc: + try: + intent = load_field( + _doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("intent") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `intent`": + _errors__.append( + 
ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("intent") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", str), + [e], + detailed_message=f"the `intent` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("steps") is None: + raise ValidationException("missing required field `steps`", None, []) + + steps = load_field( + _doc.get("steps"), + idmap_steps_union_of_array_of_union_of_WorkflowStepLoader_or_ScatterWorkflowStepLoader_or_LoopWorkflowStepLoader, + baseuri, + loadingOptions, + lc=_doc.get("steps") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `steps`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("steps") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `steps` field is not valid because:", + SourceLine(_doc, "steps", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `steps` field is not valid because:", + SourceLine(_doc, "steps", str), + [e], + 
detailed_message=f"the `steps` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `steps`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + steps=steps, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, self.id, False, None, relative_uris) + r["class"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = 
save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.inputs is not None: + r["inputs"] = save( + self.inputs, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.outputs is not None: + r["outputs"] = save( + self.outputs, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.requirements is not None: + r["requirements"] = save( + self.requirements, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.hints is not None: + r["hints"] = save( + self.hints, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.cwlVersion is not None: + u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) + r["cwlVersion"] = u + if self.intent is not None: + u = save_relative_uri(self.intent, self.id, True, None, relative_uris) + r["intent"] = u + if self.steps is not None: + r["steps"] = save( + self.steps, top=False, base_url=self.id, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + "steps", + ] + ) + + +class SubworkflowFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support nested workflows in + the `run` field of [AbstractWorkflowStep](#AbstractWorkflowStep). 
+ + """ + + def __init__( + self, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SubworkflowFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SubworkflowFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "SubworkflowFeatureRequirement": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + uri_SubworkflowFeatureRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + extension_fields=extension_fields, 
+ loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class ScatterFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support the `scatter` and + `scatterMethod` fields of [ScatterWorkflowStep](#ScatterWorkflowStep). 
+ + """ + + def __init__( + self, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ScatterFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ScatterFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "ScatterFeatureRequirement": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + uri_ScatterFeatureRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + extension_fields=extension_fields, + 
loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class MultipleInputFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support multiple inbound data links + listed in the `source` field of [WorkflowStepInput](#WorkflowStepInput). 
+ + """ + + def __init__( + self, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "MultipleInputFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, MultipleInputFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "MultipleInputFeatureRequirement": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + uri_MultipleInputFeatureRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + 
extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class StepInputExpressionRequirement(ProcessRequirement): + """ + Indicate that the workflow platform must support the `valueFrom` field + of [WorkflowStepInput](#WorkflowStepInput). 
+ + """ + + def __init__( + self, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "StepInputExpressionRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, StepInputExpressionRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "StepInputExpressionRequirement": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + uri_StepInputExpressionRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + 
extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class OperationInputParameter(InputParameter): + """ + Describe an input parameter of an operation. 
+ + """ + + id: str + + def __init__( + self, + type_: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + default: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.default = default + self.type_ = type_ + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OperationInputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default + and self.type_ == other.type_ + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.loadContents, + self.loadListing, + self.default, + self.type_, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "OperationInputParameter": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = 
doc.lc.filename + _errors__ = [] + id = None + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + 
[ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + streamable = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + 
lc=_doc.get("streamable") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `streamable`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("streamable") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + detailed_message=f"the `streamable` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` 
field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + loadContents = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("loadContents") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadContents`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadContents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the 
`loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + detailed_message=f"the `loadContents` field with value `{val}` " + "is not valid because:", + ) + ) + loadListing = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + lc=_doc.get("loadListing") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadListing`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadListing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + detailed_message=f"the `loadListing` field with value `{val}` " + "is not valid because:", + ) + ) + default = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_CWLObjectTypeLoader, + baseuri, + 
loadingOptions, + lc=_doc.get("default") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `default`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("default") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + detailed_message=f"the `default` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid 
{to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + loadContents=loadContents, + loadListing=loadListing, + default=default, + type_=type_, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, 
base_url=self.id, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.default is not None: + r["default"] = save( + self.default, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "loadContents", + "loadListing", + "default", + "type", + ] + ) + + +class OperationOutputParameter(OutputParameter): + """ + Describe an output parameter of an operation. 
+ + """ + + id: str + + def __init__( + self, + type_: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.format = format + self.type_ = type_ + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OperationOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.type_ == other.type_ + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.type_, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "OperationOutputParameter": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + id = None + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + 
_errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + 
SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + streamable = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("streamable") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `streamable`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("streamable") + if error_message != str(e): + val_type = 
convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + detailed_message=f"the `streamable` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, + 
baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but 
valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + type_=type_, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = 
save( + self.secondaryFiles, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] + ) + + +class Operation(Process): + """ + This record describes an abstract operation. It is a potential + step of a workflow that has not yet been bound to a concrete + implementation. It specifies an input and output signature, but + does not provide enough information to be executed. An + implementation (or other tooling) may provide a means of binding + an Operation to a concrete process (such as Workflow, + CommandLineTool, or ExpressionTool) with a compatible signature. 
+ + """ + + id: str + + def __init__( + self, + inputs: Any, + outputs: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + intent: Optional[Any] = None, + extension_fields: Optional[dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_ = "Operation" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Operation): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None + ) -> "Operation": + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + id = None + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + 
uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = load_field( + _doc.get("class"), + uri_Operation_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + label = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing 
required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if 
_doc.get("inputs") is None: + raise ValidationException("missing required field `inputs`", None, []) + + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_OperationInputParameterLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputs") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputs`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("inputs") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + detailed_message=f"the `inputs` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("outputs") is None: + raise ValidationException("missing required field `outputs`", None, []) + + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_OperationOutputParameterLoader, + baseuri, + loadingOptions, + lc=_doc.get("outputs") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outputs`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outputs") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + 
[ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + detailed_message=f"the `outputs` field with value `{val}` " + "is not valid because:", + ) + ) + requirements = None + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + lc=_doc.get("requirements") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `requirements`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("requirements") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + 
f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + detailed_message=f"the `requirements` field with value `{val}` " + "is not valid because:", + ) + ) + hints = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + lc=_doc.get("hints") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `hints`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("hints") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + detailed_message=f"the `hints` field with value `{val}` " + "is not valid because:", + ) + ) 
+ cwlVersion = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("cwlVersion") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `cwlVersion`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("cwlVersion") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + detailed_message=f"the `cwlVersion` field with value `{val}` " + "is not valid because:", + ) + ) + intent = None + if "intent" in _doc: + try: + intent = load_field( + _doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("intent") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `intent`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("intent") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field 
" + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", str), + [e], + detailed_message=f"the `intent` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.class_ is not None: + uri = self.loadingOptions.vocab[self.class_] + if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + uri 
= f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, self.id, False, None, relative_uris) + r["class"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.inputs is not None: + r["inputs"] = save( + self.inputs, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.outputs is not None: + r["outputs"] = save( + self.outputs, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.requirements is not None: + r["requirements"] = save( + self.requirements, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.hints is not None: + r["hints"] = save( + self.hints, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.cwlVersion is not None: + u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) + r["cwlVersion"] = u + if self.intent is not None: + u = save_relative_uri(self.intent, self.id, True, None, relative_uris) + r["intent"] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + ] + ) + + +_vocab = { + "AbstractWorkflowStep": "https://w3id.org/cwl/cwl#AbstractWorkflowStep", + "Any": "https://w3id.org/cwl/salad#Any", + "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema", + "CWLArraySchema": "https://w3id.org/cwl/cwl#CWLArraySchema", + "CWLInputFile": "https://w3id.org/cwl/cwl#CWLInputFile", + "CWLObjectType": "https://w3id.org/cwl/cwl#CWLObjectType", + "CWLRecordField": "https://w3id.org/cwl/cwl#CWLRecordField", + 
"CWLRecordSchema": "https://w3id.org/cwl/cwl#CWLRecordSchema", + "CWLType": "https://w3id.org/cwl/cwl#CWLType", + "CWLVersion": "https://w3id.org/cwl/cwl#CWLVersion", + "CommandInputArraySchema": "https://w3id.org/cwl/cwl#CommandInputArraySchema", + "CommandInputEnumSchema": "https://w3id.org/cwl/cwl#CommandInputEnumSchema", + "CommandInputParameter": "https://w3id.org/cwl/cwl#CommandInputParameter", + "CommandInputRecordField": "https://w3id.org/cwl/cwl#CommandInputRecordField", + "CommandInputRecordSchema": "https://w3id.org/cwl/cwl#CommandInputRecordSchema", + "CommandInputSchema": "https://w3id.org/cwl/cwl#CommandInputSchema", + "CommandLineBindable": "https://w3id.org/cwl/cwl#CommandLineBindable", + "CommandLineBinding": "https://w3id.org/cwl/cwl#CommandLineBinding", + "CommandLineTool": "https://w3id.org/cwl/cwl#CommandLineTool", + "CommandOutputArraySchema": "https://w3id.org/cwl/cwl#CommandOutputArraySchema", + "CommandOutputBinding": "https://w3id.org/cwl/cwl#CommandOutputBinding", + "CommandOutputEnumSchema": "https://w3id.org/cwl/cwl#CommandOutputEnumSchema", + "CommandOutputParameter": "https://w3id.org/cwl/cwl#CommandOutputParameter", + "CommandOutputRecordField": "https://w3id.org/cwl/cwl#CommandOutputRecordField", + "CommandOutputRecordSchema": "https://w3id.org/cwl/cwl#CommandOutputRecordSchema", + "Directory": "https://w3id.org/cwl/cwl#Directory", + "Dirent": "https://w3id.org/cwl/cwl#Dirent", + "DockerRequirement": "https://w3id.org/cwl/cwl#DockerRequirement", + "Documented": "https://w3id.org/cwl/salad#Documented", + "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema", + "EnvVarRequirement": "https://w3id.org/cwl/cwl#EnvVarRequirement", + "EnvironmentDef": "https://w3id.org/cwl/cwl#EnvironmentDef", + "Expression": "https://w3id.org/cwl/cwl#Expression", + "ExpressionPlaceholder": "https://w3id.org/cwl/cwl#ExpressionPlaceholder", + "ExpressionTool": "https://w3id.org/cwl/cwl#ExpressionTool", + "ExpressionToolOutputParameter": 
"https://w3id.org/cwl/cwl#ExpressionToolOutputParameter", + "FieldBase": "https://w3id.org/cwl/cwl#FieldBase", + "File": "https://w3id.org/cwl/cwl#File", + "IOSchema": "https://w3id.org/cwl/cwl#IOSchema", + "Identified": "https://w3id.org/cwl/cwl#Identified", + "InitialWorkDirRequirement": "https://w3id.org/cwl/cwl#InitialWorkDirRequirement", + "InlineJavascriptRequirement": "https://w3id.org/cwl/cwl#InlineJavascriptRequirement", + "InplaceUpdateRequirement": "https://w3id.org/cwl/cwl#InplaceUpdateRequirement", + "InputArraySchema": "https://w3id.org/cwl/cwl#InputArraySchema", + "InputBinding": "https://w3id.org/cwl/cwl#InputBinding", + "InputEnumSchema": "https://w3id.org/cwl/cwl#InputEnumSchema", + "InputFormat": "https://w3id.org/cwl/cwl#InputFormat", + "InputParameter": "https://w3id.org/cwl/cwl#InputParameter", + "InputRecordField": "https://w3id.org/cwl/cwl#InputRecordField", + "InputRecordSchema": "https://w3id.org/cwl/cwl#InputRecordSchema", + "InputSchema": "https://w3id.org/cwl/cwl#InputSchema", + "InputSink": "https://w3id.org/cwl/cwl#InputSink", + "Labeled": "https://w3id.org/cwl/cwl#Labeled", + "LinkMergeMethod": "https://w3id.org/cwl/cwl#LinkMergeMethod", + "LoadContents": "https://w3id.org/cwl/cwl#LoadContents", + "LoadListingEnum": "https://w3id.org/cwl/cwl#LoadListingEnum", + "LoadListingRequirement": "https://w3id.org/cwl/cwl#LoadListingRequirement", + "LoopInput": "https://w3id.org/cwl/cwl#LoopInput", + "LoopOutputMethod": "https://w3id.org/cwl/cwl#LoopOutputMethod", + "LoopWorkflowStep": "https://w3id.org/cwl/cwl#LoopWorkflowStep", + "MapSchema": "https://w3id.org/cwl/salad#MapSchema", + "MultipleInputFeatureRequirement": "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement", + "NetworkAccess": "https://w3id.org/cwl/cwl#NetworkAccess", + "Operation": "https://w3id.org/cwl/cwl#Operation", + "OperationInputParameter": "https://w3id.org/cwl/cwl#OperationInputParameter", + "OperationOutputParameter": 
"https://w3id.org/cwl/cwl#OperationOutputParameter", + "OutputArraySchema": "https://w3id.org/cwl/cwl#OutputArraySchema", + "OutputEnumSchema": "https://w3id.org/cwl/cwl#OutputEnumSchema", + "OutputFormat": "https://w3id.org/cwl/cwl#OutputFormat", + "OutputParameter": "https://w3id.org/cwl/cwl#OutputParameter", + "OutputRecordField": "https://w3id.org/cwl/cwl#OutputRecordField", + "OutputRecordSchema": "https://w3id.org/cwl/cwl#OutputRecordSchema", + "OutputSchema": "https://w3id.org/cwl/cwl#OutputSchema", + "OutputSink": "https://w3id.org/cwl/cwl#OutputSink", + "Parameter": "https://w3id.org/cwl/cwl#Parameter", + "PickValueMethod": "https://w3id.org/cwl/cwl#PickValueMethod", + "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType", + "Process": "https://w3id.org/cwl/cwl#Process", + "ProcessRequirement": "https://w3id.org/cwl/cwl#ProcessRequirement", + "RecordField": "https://w3id.org/cwl/salad#RecordField", + "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema", + "ResourceRequirement": "https://w3id.org/cwl/cwl#ResourceRequirement", + "ScatterFeatureRequirement": "https://w3id.org/cwl/cwl#ScatterFeatureRequirement", + "ScatterMethod": "https://w3id.org/cwl/cwl#ScatterMethod", + "ScatterWorkflowStep": "https://w3id.org/cwl/cwl#ScatterWorkflowStep", + "SchemaDefRequirement": "https://w3id.org/cwl/cwl#SchemaDefRequirement", + "SecondaryFileSchema": "https://w3id.org/cwl/cwl#SecondaryFileSchema", + "ShellCommandRequirement": "https://w3id.org/cwl/cwl#ShellCommandRequirement", + "Sink": "https://w3id.org/cwl/cwl#Sink", + "SoftwarePackage": "https://w3id.org/cwl/cwl#SoftwarePackage", + "SoftwareRequirement": "https://w3id.org/cwl/cwl#SoftwareRequirement", + "StepInputExpressionRequirement": "https://w3id.org/cwl/cwl#StepInputExpressionRequirement", + "SubworkflowFeatureRequirement": "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement", + "ToolTimeLimit": "https://w3id.org/cwl/cwl#ToolTimeLimit", + "UnionSchema": "https://w3id.org/cwl/salad#UnionSchema", + 
"WorkReuse": "https://w3id.org/cwl/cwl#WorkReuse", + "Workflow": "https://w3id.org/cwl/cwl#Workflow", + "WorkflowInputParameter": "https://w3id.org/cwl/cwl#WorkflowInputParameter", + "WorkflowOutputParameter": "https://w3id.org/cwl/cwl#WorkflowOutputParameter", + "WorkflowStep": "https://w3id.org/cwl/cwl#WorkflowStep", + "WorkflowStepInput": "https://w3id.org/cwl/cwl#WorkflowStepInput", + "WorkflowStepOutput": "https://w3id.org/cwl/cwl#WorkflowStepOutput", + "all_iterations": "https://w3id.org/cwl/cwl#LoopOutputMethod/all_iterations", + "all_non_null": "https://w3id.org/cwl/cwl#PickValueMethod/all_non_null", + "array": "https://w3id.org/cwl/salad#array", + "boolean": "http://www.w3.org/2001/XMLSchema#boolean", + "deep_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing", + "dotproduct": "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct", + "double": "http://www.w3.org/2001/XMLSchema#double", + "draft-2": "https://w3id.org/cwl/cwl#draft-2", + "draft-3": "https://w3id.org/cwl/cwl#draft-3", + "draft-3.dev1": "https://w3id.org/cwl/cwl#draft-3.dev1", + "draft-3.dev2": "https://w3id.org/cwl/cwl#draft-3.dev2", + "draft-3.dev3": "https://w3id.org/cwl/cwl#draft-3.dev3", + "draft-3.dev4": "https://w3id.org/cwl/cwl#draft-3.dev4", + "draft-3.dev5": "https://w3id.org/cwl/cwl#draft-3.dev5", + "draft-4.dev1": "https://w3id.org/cwl/cwl#draft-4.dev1", + "draft-4.dev2": "https://w3id.org/cwl/cwl#draft-4.dev2", + "draft-4.dev3": "https://w3id.org/cwl/cwl#draft-4.dev3", + "enum": "https://w3id.org/cwl/salad#enum", + "first_non_null": "https://w3id.org/cwl/cwl#PickValueMethod/first_non_null", + "flat_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct", + "float": "http://www.w3.org/2001/XMLSchema#float", + "int": "http://www.w3.org/2001/XMLSchema#int", + "last_iteration": "https://w3id.org/cwl/cwl#LoopOutputMethod/last_iteration", + "long": "http://www.w3.org/2001/XMLSchema#long", + "map": "https://w3id.org/cwl/salad#map", + "merge_flattened": 
"https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened", + "merge_nested": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested", + "nested_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct", + "no_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/no_listing", + "null": "https://w3id.org/cwl/salad#null", + "record": "https://w3id.org/cwl/salad#record", + "shallow_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/shallow_listing", + "stderr": "https://w3id.org/cwl/cwl#stderr", + "stdin": "https://w3id.org/cwl/cwl#stdin", + "stdout": "https://w3id.org/cwl/cwl#stdout", + "string": "http://www.w3.org/2001/XMLSchema#string", + "the_only_non_null": "https://w3id.org/cwl/cwl#PickValueMethod/the_only_non_null", + "union": "https://w3id.org/cwl/salad#union", + "v1.0": "https://w3id.org/cwl/cwl#v1.0", + "v1.0.dev4": "https://w3id.org/cwl/cwl#v1.0.dev4", + "v1.1": "https://w3id.org/cwl/cwl#v1.1", + "v1.1.0-dev1": "https://w3id.org/cwl/cwl#v1.1.0-dev1", + "v1.2": "https://w3id.org/cwl/cwl#v1.2", + "v1.2.0-dev1": "https://w3id.org/cwl/cwl#v1.2.0-dev1", + "v1.2.0-dev2": "https://w3id.org/cwl/cwl#v1.2.0-dev2", + "v1.2.0-dev3": "https://w3id.org/cwl/cwl#v1.2.0-dev3", + "v1.2.0-dev4": "https://w3id.org/cwl/cwl#v1.2.0-dev4", + "v1.2.0-dev5": "https://w3id.org/cwl/cwl#v1.2.0-dev5", + "v1.3.0-dev1": "https://w3id.org/cwl/cwl#v1.3.0-dev1", +} +_rvocab = { + "https://w3id.org/cwl/cwl#AbstractWorkflowStep": "AbstractWorkflowStep", + "https://w3id.org/cwl/salad#Any": "Any", + "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema", + "https://w3id.org/cwl/cwl#CWLArraySchema": "CWLArraySchema", + "https://w3id.org/cwl/cwl#CWLInputFile": "CWLInputFile", + "https://w3id.org/cwl/cwl#CWLObjectType": "CWLObjectType", + "https://w3id.org/cwl/cwl#CWLRecordField": "CWLRecordField", + "https://w3id.org/cwl/cwl#CWLRecordSchema": "CWLRecordSchema", + "https://w3id.org/cwl/cwl#CWLType": "CWLType", + "https://w3id.org/cwl/cwl#CWLVersion": "CWLVersion", + 
"https://w3id.org/cwl/cwl#CommandInputArraySchema": "CommandInputArraySchema", + "https://w3id.org/cwl/cwl#CommandInputEnumSchema": "CommandInputEnumSchema", + "https://w3id.org/cwl/cwl#CommandInputParameter": "CommandInputParameter", + "https://w3id.org/cwl/cwl#CommandInputRecordField": "CommandInputRecordField", + "https://w3id.org/cwl/cwl#CommandInputRecordSchema": "CommandInputRecordSchema", + "https://w3id.org/cwl/cwl#CommandInputSchema": "CommandInputSchema", + "https://w3id.org/cwl/cwl#CommandLineBindable": "CommandLineBindable", + "https://w3id.org/cwl/cwl#CommandLineBinding": "CommandLineBinding", + "https://w3id.org/cwl/cwl#CommandLineTool": "CommandLineTool", + "https://w3id.org/cwl/cwl#CommandOutputArraySchema": "CommandOutputArraySchema", + "https://w3id.org/cwl/cwl#CommandOutputBinding": "CommandOutputBinding", + "https://w3id.org/cwl/cwl#CommandOutputEnumSchema": "CommandOutputEnumSchema", + "https://w3id.org/cwl/cwl#CommandOutputParameter": "CommandOutputParameter", + "https://w3id.org/cwl/cwl#CommandOutputRecordField": "CommandOutputRecordField", + "https://w3id.org/cwl/cwl#CommandOutputRecordSchema": "CommandOutputRecordSchema", + "https://w3id.org/cwl/cwl#Directory": "Directory", + "https://w3id.org/cwl/cwl#Dirent": "Dirent", + "https://w3id.org/cwl/cwl#DockerRequirement": "DockerRequirement", + "https://w3id.org/cwl/salad#Documented": "Documented", + "https://w3id.org/cwl/salad#EnumSchema": "EnumSchema", + "https://w3id.org/cwl/cwl#EnvVarRequirement": "EnvVarRequirement", + "https://w3id.org/cwl/cwl#EnvironmentDef": "EnvironmentDef", + "https://w3id.org/cwl/cwl#Expression": "Expression", + "https://w3id.org/cwl/cwl#ExpressionPlaceholder": "ExpressionPlaceholder", + "https://w3id.org/cwl/cwl#ExpressionTool": "ExpressionTool", + "https://w3id.org/cwl/cwl#ExpressionToolOutputParameter": "ExpressionToolOutputParameter", + "https://w3id.org/cwl/cwl#FieldBase": "FieldBase", + "https://w3id.org/cwl/cwl#File": "File", + 
"https://w3id.org/cwl/cwl#IOSchema": "IOSchema", + "https://w3id.org/cwl/cwl#Identified": "Identified", + "https://w3id.org/cwl/cwl#InitialWorkDirRequirement": "InitialWorkDirRequirement", + "https://w3id.org/cwl/cwl#InlineJavascriptRequirement": "InlineJavascriptRequirement", + "https://w3id.org/cwl/cwl#InplaceUpdateRequirement": "InplaceUpdateRequirement", + "https://w3id.org/cwl/cwl#InputArraySchema": "InputArraySchema", + "https://w3id.org/cwl/cwl#InputBinding": "InputBinding", + "https://w3id.org/cwl/cwl#InputEnumSchema": "InputEnumSchema", + "https://w3id.org/cwl/cwl#InputFormat": "InputFormat", + "https://w3id.org/cwl/cwl#InputParameter": "InputParameter", + "https://w3id.org/cwl/cwl#InputRecordField": "InputRecordField", + "https://w3id.org/cwl/cwl#InputRecordSchema": "InputRecordSchema", + "https://w3id.org/cwl/cwl#InputSchema": "InputSchema", + "https://w3id.org/cwl/cwl#InputSink": "InputSink", + "https://w3id.org/cwl/cwl#Labeled": "Labeled", + "https://w3id.org/cwl/cwl#LinkMergeMethod": "LinkMergeMethod", + "https://w3id.org/cwl/cwl#LoadContents": "LoadContents", + "https://w3id.org/cwl/cwl#LoadListingEnum": "LoadListingEnum", + "https://w3id.org/cwl/cwl#LoadListingRequirement": "LoadListingRequirement", + "https://w3id.org/cwl/cwl#LoopInput": "LoopInput", + "https://w3id.org/cwl/cwl#LoopOutputMethod": "LoopOutputMethod", + "https://w3id.org/cwl/cwl#LoopWorkflowStep": "LoopWorkflowStep", + "https://w3id.org/cwl/salad#MapSchema": "MapSchema", + "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement": "MultipleInputFeatureRequirement", + "https://w3id.org/cwl/cwl#NetworkAccess": "NetworkAccess", + "https://w3id.org/cwl/cwl#Operation": "Operation", + "https://w3id.org/cwl/cwl#OperationInputParameter": "OperationInputParameter", + "https://w3id.org/cwl/cwl#OperationOutputParameter": "OperationOutputParameter", + "https://w3id.org/cwl/cwl#OutputArraySchema": "OutputArraySchema", + "https://w3id.org/cwl/cwl#OutputEnumSchema": "OutputEnumSchema", + 
"https://w3id.org/cwl/cwl#OutputFormat": "OutputFormat", + "https://w3id.org/cwl/cwl#OutputParameter": "OutputParameter", + "https://w3id.org/cwl/cwl#OutputRecordField": "OutputRecordField", + "https://w3id.org/cwl/cwl#OutputRecordSchema": "OutputRecordSchema", + "https://w3id.org/cwl/cwl#OutputSchema": "OutputSchema", + "https://w3id.org/cwl/cwl#OutputSink": "OutputSink", + "https://w3id.org/cwl/cwl#Parameter": "Parameter", + "https://w3id.org/cwl/cwl#PickValueMethod": "PickValueMethod", + "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType", + "https://w3id.org/cwl/cwl#Process": "Process", + "https://w3id.org/cwl/cwl#ProcessRequirement": "ProcessRequirement", + "https://w3id.org/cwl/salad#RecordField": "RecordField", + "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema", + "https://w3id.org/cwl/cwl#ResourceRequirement": "ResourceRequirement", + "https://w3id.org/cwl/cwl#ScatterFeatureRequirement": "ScatterFeatureRequirement", + "https://w3id.org/cwl/cwl#ScatterMethod": "ScatterMethod", + "https://w3id.org/cwl/cwl#ScatterWorkflowStep": "ScatterWorkflowStep", + "https://w3id.org/cwl/cwl#SchemaDefRequirement": "SchemaDefRequirement", + "https://w3id.org/cwl/cwl#SecondaryFileSchema": "SecondaryFileSchema", + "https://w3id.org/cwl/cwl#ShellCommandRequirement": "ShellCommandRequirement", + "https://w3id.org/cwl/cwl#Sink": "Sink", + "https://w3id.org/cwl/cwl#SoftwarePackage": "SoftwarePackage", + "https://w3id.org/cwl/cwl#SoftwareRequirement": "SoftwareRequirement", + "https://w3id.org/cwl/cwl#StepInputExpressionRequirement": "StepInputExpressionRequirement", + "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement": "SubworkflowFeatureRequirement", + "https://w3id.org/cwl/cwl#ToolTimeLimit": "ToolTimeLimit", + "https://w3id.org/cwl/salad#UnionSchema": "UnionSchema", + "https://w3id.org/cwl/cwl#WorkReuse": "WorkReuse", + "https://w3id.org/cwl/cwl#Workflow": "Workflow", + "https://w3id.org/cwl/cwl#WorkflowInputParameter": "WorkflowInputParameter", + 
"https://w3id.org/cwl/cwl#WorkflowOutputParameter": "WorkflowOutputParameter", + "https://w3id.org/cwl/cwl#WorkflowStep": "WorkflowStep", + "https://w3id.org/cwl/cwl#WorkflowStepInput": "WorkflowStepInput", + "https://w3id.org/cwl/cwl#WorkflowStepOutput": "WorkflowStepOutput", + "https://w3id.org/cwl/cwl#LoopOutputMethod/all_iterations": "all_iterations", + "https://w3id.org/cwl/cwl#PickValueMethod/all_non_null": "all_non_null", + "https://w3id.org/cwl/salad#array": "array", + "http://www.w3.org/2001/XMLSchema#boolean": "boolean", + "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing": "deep_listing", + "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct": "dotproduct", + "http://www.w3.org/2001/XMLSchema#double": "double", + "https://w3id.org/cwl/cwl#draft-2": "draft-2", + "https://w3id.org/cwl/cwl#draft-3": "draft-3", + "https://w3id.org/cwl/cwl#draft-3.dev1": "draft-3.dev1", + "https://w3id.org/cwl/cwl#draft-3.dev2": "draft-3.dev2", + "https://w3id.org/cwl/cwl#draft-3.dev3": "draft-3.dev3", + "https://w3id.org/cwl/cwl#draft-3.dev4": "draft-3.dev4", + "https://w3id.org/cwl/cwl#draft-3.dev5": "draft-3.dev5", + "https://w3id.org/cwl/cwl#draft-4.dev1": "draft-4.dev1", + "https://w3id.org/cwl/cwl#draft-4.dev2": "draft-4.dev2", + "https://w3id.org/cwl/cwl#draft-4.dev3": "draft-4.dev3", + "https://w3id.org/cwl/salad#enum": "enum", + "https://w3id.org/cwl/cwl#PickValueMethod/first_non_null": "first_non_null", + "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct": "flat_crossproduct", + "http://www.w3.org/2001/XMLSchema#float": "float", + "http://www.w3.org/2001/XMLSchema#int": "int", + "https://w3id.org/cwl/cwl#LoopOutputMethod/last_iteration": "last_iteration", + "http://www.w3.org/2001/XMLSchema#long": "long", + "https://w3id.org/cwl/salad#map": "map", + "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened": "merge_flattened", + "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested": "merge_nested", + 
"https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct": "nested_crossproduct", + "https://w3id.org/cwl/cwl#LoadListingEnum/no_listing": "no_listing", + "https://w3id.org/cwl/salad#null": "null", + "https://w3id.org/cwl/salad#record": "record", + "https://w3id.org/cwl/cwl#LoadListingEnum/shallow_listing": "shallow_listing", + "https://w3id.org/cwl/cwl#stderr": "stderr", + "https://w3id.org/cwl/cwl#stdin": "stdin", + "https://w3id.org/cwl/cwl#stdout": "stdout", + "http://www.w3.org/2001/XMLSchema#string": "string", + "https://w3id.org/cwl/cwl#PickValueMethod/the_only_non_null": "the_only_non_null", + "https://w3id.org/cwl/salad#union": "union", + "https://w3id.org/cwl/cwl#v1.0": "v1.0", + "https://w3id.org/cwl/cwl#v1.0.dev4": "v1.0.dev4", + "https://w3id.org/cwl/cwl#v1.1": "v1.1", + "https://w3id.org/cwl/cwl#v1.1.0-dev1": "v1.1.0-dev1", + "https://w3id.org/cwl/cwl#v1.2": "v1.2", + "https://w3id.org/cwl/cwl#v1.2.0-dev1": "v1.2.0-dev1", + "https://w3id.org/cwl/cwl#v1.2.0-dev2": "v1.2.0-dev2", + "https://w3id.org/cwl/cwl#v1.2.0-dev3": "v1.2.0-dev3", + "https://w3id.org/cwl/cwl#v1.2.0-dev4": "v1.2.0-dev4", + "https://w3id.org/cwl/cwl#v1.2.0-dev5": "v1.2.0-dev5", + "https://w3id.org/cwl/cwl#v1.3.0-dev1": "v1.3.0-dev1", +} + +strtype = _PrimitiveLoader(str) +inttype = _PrimitiveLoader(int) +floattype = _PrimitiveLoader(float) +booltype = _PrimitiveLoader(bool) +None_type = _PrimitiveLoader(type(None)) +Any_type = _AnyLoader() +PrimitiveTypeLoader = _EnumLoader( + ( + "null", + "boolean", + "int", + "long", + "float", + "double", + "string", + ), + "PrimitiveType", +) +""" +Names of salad data types (based on Avro schema declarations). + +Refer to the [Avro schema declaration documentation](https://avro.apache.org/docs/current/spec.html#schemas) for +detailed information. 
+ +null: no value +boolean: a binary value +int: 32-bit signed integer +long: 64-bit signed integer +float: single precision (32-bit) IEEE 754 floating-point number +double: double precision (64-bit) IEEE 754 floating-point number +string: Unicode character sequence +""" +AnyLoader = _EnumLoader(("Any",), "Any") +""" +The **Any** type validates for any non-null value. +""" +RecordFieldLoader = _RecordLoader(RecordField, None, None) +RecordSchemaLoader = _RecordLoader(RecordSchema, None, None) +EnumSchemaLoader = _RecordLoader(EnumSchema, None, None) +ArraySchemaLoader = _RecordLoader(ArraySchema, None, None) +MapSchemaLoader = _RecordLoader(MapSchema, None, None) +UnionSchemaLoader = _RecordLoader(UnionSchema, None, None) +CWLTypeLoader = _EnumLoader( + ( + "null", + "boolean", + "int", + "long", + "float", + "double", + "string", + "File", + "Directory", + ), + "CWLType", +) +""" +Extends primitive types with the concept of a file and directory as a builtin type. +File: A File object +Directory: A Directory object +""" +CWLArraySchemaLoader = _RecordLoader(CWLArraySchema, None, None) +CWLRecordFieldLoader = _RecordLoader(CWLRecordField, None, None) +CWLRecordSchemaLoader = _RecordLoader(CWLRecordSchema, None, None) +FileLoader = _RecordLoader(File, None, None) +DirectoryLoader = _RecordLoader(Directory, None, None) +CWLObjectTypeLoader = _UnionLoader((), "CWLObjectTypeLoader") +union_of_None_type_or_CWLObjectTypeLoader = _UnionLoader( + ( + None_type, + CWLObjectTypeLoader, + ) +) +array_of_union_of_None_type_or_CWLObjectTypeLoader = _ArrayLoader( + union_of_None_type_or_CWLObjectTypeLoader +) +map_of_union_of_None_type_or_CWLObjectTypeLoader = _MapLoader( + union_of_None_type_or_CWLObjectTypeLoader, "None", None, None +) +InlineJavascriptRequirementLoader = _RecordLoader( + InlineJavascriptRequirement, None, None +) +SchemaDefRequirementLoader = _RecordLoader(SchemaDefRequirement, None, None) +LoadListingRequirementLoader = _RecordLoader(LoadListingRequirement, 
None, None) +DockerRequirementLoader = _RecordLoader(DockerRequirement, None, None) +SoftwareRequirementLoader = _RecordLoader(SoftwareRequirement, None, None) +InitialWorkDirRequirementLoader = _RecordLoader(InitialWorkDirRequirement, None, None) +EnvVarRequirementLoader = _RecordLoader(EnvVarRequirement, None, None) +ShellCommandRequirementLoader = _RecordLoader(ShellCommandRequirement, None, None) +ResourceRequirementLoader = _RecordLoader(ResourceRequirement, None, None) +WorkReuseLoader = _RecordLoader(WorkReuse, None, None) +NetworkAccessLoader = _RecordLoader(NetworkAccess, None, None) +InplaceUpdateRequirementLoader = _RecordLoader(InplaceUpdateRequirement, None, None) +ToolTimeLimitLoader = _RecordLoader(ToolTimeLimit, None, None) +SubworkflowFeatureRequirementLoader = _RecordLoader( + SubworkflowFeatureRequirement, None, None +) +ScatterFeatureRequirementLoader = _RecordLoader(ScatterFeatureRequirement, None, None) +MultipleInputFeatureRequirementLoader = _RecordLoader( + MultipleInputFeatureRequirement, None, None +) +StepInputExpressionRequirementLoader = _RecordLoader( + StepInputExpressionRequirement, None, None +) +union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader( + ( + InlineJavascriptRequirementLoader, + SchemaDefRequirementLoader, + LoadListingRequirementLoader, + DockerRequirementLoader, + SoftwareRequirementLoader, + InitialWorkDirRequirementLoader, + EnvVarRequirementLoader, + ShellCommandRequirementLoader, + ResourceRequirementLoader, + 
WorkReuseLoader, + NetworkAccessLoader, + InplaceUpdateRequirementLoader, + ToolTimeLimitLoader, + SubworkflowFeatureRequirementLoader, + ScatterFeatureRequirementLoader, + MultipleInputFeatureRequirementLoader, + StepInputExpressionRequirementLoader, + ) +) +array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _ArrayLoader( + union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader +) +union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_CWLObjectTypeLoader = _UnionLoader( + ( + None_type, + 
array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + CWLObjectTypeLoader, + ) +) +map_of_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_CWLObjectTypeLoader = _MapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_CWLObjectTypeLoader, + "CWLInputFile", + "@list", + True, +) +CWLInputFileLoader = 
map_of_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_CWLObjectTypeLoader +CWLVersionLoader = _EnumLoader( + ( + "draft-2", + "draft-3.dev1", + "draft-3.dev2", + "draft-3.dev3", + "draft-3.dev4", + "draft-3.dev5", + "draft-3", + "draft-4.dev1", + "draft-4.dev2", + "draft-4.dev3", + "v1.0.dev4", + "v1.0", + "v1.1.0-dev1", + "v1.1", + "v1.2.0-dev1", + "v1.2.0-dev2", + "v1.2.0-dev3", + "v1.2.0-dev4", + "v1.2.0-dev5", + "v1.2", + "v1.3.0-dev1", + ), + "CWLVersion", +) +""" +Version symbols for published CWL document versions. +""" +LoadListingEnumLoader = _EnumLoader( + ( + "no_listing", + "shallow_listing", + "deep_listing", + ), + "LoadListingEnum", +) +""" +Specify the desired behavior for loading the `listing` field of +a Directory object for use by expressions. + +no_listing: Do not load the directory listing. +shallow_listing: Only load the top level listing, do not recurse into subdirectories. +deep_listing: Load the directory listing and recursively load all subdirectories as well. 
+""" +ExpressionLoader = _ExpressionLoader(str) +InputBindingLoader = _RecordLoader(InputBinding, None, None) +InputRecordFieldLoader = _RecordLoader(InputRecordField, None, None) +InputRecordSchemaLoader = _RecordLoader(InputRecordSchema, None, None) +InputEnumSchemaLoader = _RecordLoader(InputEnumSchema, None, None) +InputArraySchemaLoader = _RecordLoader(InputArraySchema, None, None) +OutputRecordFieldLoader = _RecordLoader(OutputRecordField, None, None) +OutputRecordSchemaLoader = _RecordLoader(OutputRecordSchema, None, None) +OutputEnumSchemaLoader = _RecordLoader(OutputEnumSchema, None, None) +OutputArraySchemaLoader = _RecordLoader(OutputArraySchema, None, None) +SecondaryFileSchemaLoader = _RecordLoader(SecondaryFileSchema, None, None) +EnvironmentDefLoader = _RecordLoader(EnvironmentDef, None, None) +CommandLineBindingLoader = _RecordLoader(CommandLineBinding, None, None) +CommandOutputBindingLoader = _RecordLoader(CommandOutputBinding, None, None) +CommandLineBindableLoader = _RecordLoader(CommandLineBindable, None, None) +CommandInputRecordFieldLoader = _RecordLoader(CommandInputRecordField, None, None) +CommandInputRecordSchemaLoader = _RecordLoader(CommandInputRecordSchema, None, None) +CommandInputEnumSchemaLoader = _RecordLoader(CommandInputEnumSchema, None, None) +CommandInputArraySchemaLoader = _RecordLoader(CommandInputArraySchema, None, None) +CommandOutputRecordFieldLoader = _RecordLoader(CommandOutputRecordField, None, None) +CommandOutputRecordSchemaLoader = _RecordLoader(CommandOutputRecordSchema, None, None) +CommandOutputEnumSchemaLoader = _RecordLoader(CommandOutputEnumSchema, None, None) +CommandOutputArraySchemaLoader = _RecordLoader(CommandOutputArraySchema, None, None) +CommandInputParameterLoader = _RecordLoader(CommandInputParameter, None, None) +CommandOutputParameterLoader = _RecordLoader(CommandOutputParameter, None, None) +stdinLoader = _EnumLoader(("stdin",), "stdin") +""" +Only valid as a `type` for a `CommandLineTool` input 
with no +`inputBinding` set. `stdin` must not be specified at the `CommandLineTool` +level. + +The following +``` +inputs: + an_input_name: + type: stdin +``` +is equivalent to +``` +inputs: + an_input_name: + type: File + streamable: true + +stdin: $(inputs.an_input_name.path) +``` +""" +stdoutLoader = _EnumLoader(("stdout",), "stdout") +""" +Only valid as a `type` for a `CommandLineTool` output with no +`outputBinding` set. + +The following +``` +outputs: + an_output_name: + type: stdout + +stdout: a_stdout_file +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stdout_file + +stdout: a_stdout_file +``` + +If there is no `stdout` name provided, a random filename will be created. +For example, the following +``` +outputs: + an_output_name: + type: stdout +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stdout_filenameABCDEFG + +stdout: random_stdout_filenameABCDEFG +``` + +If the `CommandLineTool` contains logically chained commands +(e.g. `echo a && echo b`) `stdout` must include the output of +every command. +""" +stderrLoader = _EnumLoader(("stderr",), "stderr") +""" +Only valid as a `type` for a `CommandLineTool` output with no +`outputBinding` set. + +The following +``` +outputs: + an_output_name: + type: stderr + +stderr: a_stderr_file +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stderr_file + +stderr: a_stderr_file +``` + +If there is no `stderr` name provided, a random filename will be created. 
+For example, the following +``` +outputs: + an_output_name: + type: stderr +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stderr_filenameABCDEFG + +stderr: random_stderr_filenameABCDEFG +``` +""" +CommandLineToolLoader = _RecordLoader(CommandLineTool, None, None) +SoftwarePackageLoader = _RecordLoader(SoftwarePackage, None, None) +DirentLoader = _RecordLoader(Dirent, None, None) +ExpressionToolOutputParameterLoader = _RecordLoader( + ExpressionToolOutputParameter, None, None +) +WorkflowInputParameterLoader = _RecordLoader(WorkflowInputParameter, None, None) +ExpressionToolLoader = _RecordLoader(ExpressionTool, None, None) +LinkMergeMethodLoader = _EnumLoader( + ( + "merge_nested", + "merge_flattened", + ), + "LinkMergeMethod", +) +""" +The input link merge method, described in [WorkflowStepInput](#WorkflowStepInput). +""" +PickValueMethodLoader = _EnumLoader( + ( + "first_non_null", + "the_only_non_null", + "all_non_null", + ), + "PickValueMethod", +) +""" +Picking non-null values among inbound data links, described in [WorkflowStepInput](#WorkflowStepInput). +""" +WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter, None, None) +WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput, None, None) +WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput, None, None) +LoopInputLoader = _RecordLoader(LoopInput, None, None) +ScatterMethodLoader = _EnumLoader( + ( + "dotproduct", + "nested_crossproduct", + "flat_crossproduct", + ), + "ScatterMethod", +) +""" +The scatter method, as described in [workflow step scatter](#ScatterWorkflowStep). +""" +LoopOutputMethodLoader = _EnumLoader( + ( + "last_iteration", + "all_iterations", + ), + "LoopOutputMethod", +) +""" +The loop output method, as described in [workflow step loop](#LoopWorkflowStep). 
+""" +WorkflowStepLoader = _RecordLoader(WorkflowStep, None, None) +ScatterWorkflowStepLoader = _RecordLoader(ScatterWorkflowStep, None, None) +LoopWorkflowStepLoader = _RecordLoader(LoopWorkflowStep, None, None) +WorkflowLoader = _RecordLoader(Workflow, None, None) +OperationInputParameterLoader = _RecordLoader(OperationInputParameter, None, None) +OperationOutputParameterLoader = _RecordLoader(OperationOutputParameter, None, None) +OperationLoader = _RecordLoader(Operation, None, None) +array_of_strtype = _ArrayLoader(strtype) +union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader( + ( + None_type, + strtype, + array_of_strtype, + ) +) +uri_strtype_True_False_None_None = _URILoader(strtype, True, False, None, None) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype = _UnionLoader( + ( + PrimitiveTypeLoader, + RecordSchemaLoader, + EnumSchemaLoader, + ArraySchemaLoader, + MapSchemaLoader, + UnionSchemaLoader, + strtype, + ) +) +array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype = _ArrayLoader( + union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype +) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype = _UnionLoader( + ( + PrimitiveTypeLoader, + RecordSchemaLoader, + EnumSchemaLoader, + ArraySchemaLoader, + MapSchemaLoader, + UnionSchemaLoader, + strtype, + array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype, + ) +) 
# --- Auto-generated loader instances (Schema Salad codegen, CWL v1.3 parser). ---
# Do not edit by hand; regenerate from the CWL schema instead (see the commit
# that introduced this file).  Definition order is load-bearing: each loader
# below references loaders defined earlier in the module.
# Naming convention (observable throughout): the identifier spells out the
# composed type expression, and trailing tokens such as "_2" or
# "_False_True_2_None" mirror the extra positional arguments passed to the
# loader constructor.

# Type-DSL loader for `type` fields on metaschema records ("v1.3" is the
# salad/CWL version string passed to _TypeDSLLoader).
typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_2 = _TypeDSLLoader(
    union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype,
    2,
    "v1.3",
)
array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader)
union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader(
    (
        None_type,
        array_of_RecordFieldLoader,
    )
)
# idmap loader for `fields`: accepts the map shorthand keyed by "name" with
# "type" as the predicate — presumably the standard Salad idmap convention;
# confirm against the schema-salad documentation.
idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader(
    union_of_None_type_or_array_of_RecordFieldLoader, "name", "type"
)
# Single-symbol enums pin the literal `type` discriminators of the schema
# record kinds ("record", "enum", "array", "map", "union").
Record_nameLoader = _EnumLoader(("record",), "Record_name")
typedsl_Record_nameLoader_2 = _TypeDSLLoader(Record_nameLoader, 2, "v1.3")
union_of_None_type_or_strtype = _UnionLoader(
    (
        None_type,
        strtype,
    )
)
uri_union_of_None_type_or_strtype_True_False_None_None = _URILoader(
    union_of_None_type_or_strtype, True, False, None, None
)
uri_array_of_strtype_True_False_None_None = _URILoader(
    array_of_strtype, True, False, None, None
)
Enum_nameLoader = _EnumLoader(("enum",), "Enum_name")
typedsl_Enum_nameLoader_2 = _TypeDSLLoader(Enum_nameLoader, 2, "v1.3")
uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None = _URILoader(
    union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype,
    False,
    True,
    2,
    None,
)
Array_nameLoader = _EnumLoader(("array",), "Array_name")
typedsl_Array_nameLoader_2 = _TypeDSLLoader(Array_nameLoader, 2, "v1.3")
Map_nameLoader = _EnumLoader(("map",), "Map_name")
typedsl_Map_nameLoader_2 = _TypeDSLLoader(Map_nameLoader, 2, "v1.3")
Union_nameLoader = _EnumLoader(("union",), "Union_name")
typedsl_Union_nameLoader_2 = _TypeDSLLoader(Union_nameLoader, 2, "v1.3")
# CWL-flavoured variants of the schema unions (CWLRecordSchema /
# CWLArraySchema instead of the plain metaschema records).
union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype = _UnionLoader(
    (
        PrimitiveTypeLoader,
        CWLRecordSchemaLoader,
        EnumSchemaLoader,
        CWLArraySchemaLoader,
        strtype,
    )
)
array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype = _ArrayLoader(
    union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype
)
union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype = _UnionLoader(
    (
        PrimitiveTypeLoader,
        CWLRecordSchemaLoader,
        EnumSchemaLoader,
        CWLArraySchemaLoader,
        strtype,
        array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype,
    )
)
uri_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_False_True_2_None = _URILoader(
    union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype,
    False,
    True,
    2,
    None,
)
typedsl_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(
    union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype,
    2,
    "v1.3",
)
array_of_CWLRecordFieldLoader = _ArrayLoader(CWLRecordFieldLoader)
union_of_None_type_or_array_of_CWLRecordFieldLoader = _UnionLoader(
    (
        None_type,
        array_of_CWLRecordFieldLoader,
    )
)
idmap_fields_union_of_None_type_or_array_of_CWLRecordFieldLoader = _IdMapLoader(
    union_of_None_type_or_array_of_CWLRecordFieldLoader, "name", "type"
)
# `class: File` / `class: Directory` discriminators and the loaders for the
# runtime File/Directory object fields.
File_classLoader = _EnumLoader(("File",), "File_class")
uri_File_classLoader_False_True_None_None = _URILoader(
    File_classLoader, False, True, None, None
)
uri_union_of_None_type_or_strtype_False_False_None_None = _URILoader(
    union_of_None_type_or_strtype, False, False, None, None
)
union_of_None_type_or_inttype = _UnionLoader(
    (
        None_type,
        inttype,
    )
)
union_of_FileLoader_or_DirectoryLoader = _UnionLoader(
    (
        FileLoader,
        DirectoryLoader,
    )
)
array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader(
    union_of_FileLoader_or_DirectoryLoader
)
union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader(
    (
        None_type,
        array_of_union_of_FileLoader_or_DirectoryLoader,
    )
)
# secondaryFiles accepts either the DSL shorthand (handled by
# _SecondaryDSLLoader) or the expanded object form.
secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader(
    (
        _SecondaryDSLLoader(
            union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader
        ),
        union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader,
    )
)
uri_union_of_None_type_or_strtype_True_False_None_True = _URILoader(
    union_of_None_type_or_strtype, True, False, None, True
)
Directory_classLoader = _EnumLoader(("Directory",), "Directory_class")
uri_Directory_classLoader_False_True_None_None = _URILoader(
    Directory_classLoader, False, True, None, None
)
union_of_None_type_or_booltype = _UnionLoader(
    (
        None_type,
        booltype,
    )
)
union_of_None_type_or_LoadListingEnumLoader = _UnionLoader(
    (
        None_type,
        LoadListingEnumLoader,
    )
)
array_of_SecondaryFileSchemaLoader = _ArrayLoader(SecondaryFileSchemaLoader)
union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _UnionLoader(
    (
        None_type,
        SecondaryFileSchemaLoader,
        array_of_SecondaryFileSchemaLoader,
    )
)
secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _UnionLoader(
    (
        _SecondaryDSLLoader(
            union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader
        ),
        union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader,
    )
)
union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader = _UnionLoader(
    (
        None_type,
        strtype,
        array_of_strtype,
        ExpressionLoader,
    )
)
uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True = _URILoader(
    union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader,
    True,
    False,
    None,
    True,
)
union_of_None_type_or_strtype_or_ExpressionLoader = _UnionLoader(
    (
        None_type,
        strtype,
        ExpressionLoader,
    )
)
uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True = _URILoader(
    union_of_None_type_or_strtype_or_ExpressionLoader, True, False, None, True
)
# Auto-generated (Schema Salad codegen, CWL v1.3): loaders for input-parameter
# `type` expressions — a CWL type, an Input* schema record, a string name, or
# an array of any of those.  Regenerate from the schema; do not hand-edit.
union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader(
    (
        CWLTypeLoader,
        InputRecordSchemaLoader,
        InputEnumSchemaLoader,
        InputArraySchemaLoader,
        strtype,
    )
)
array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _ArrayLoader(
    union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype
)
union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader(
    (
        CWLTypeLoader,
        InputRecordSchemaLoader,
        InputEnumSchemaLoader,
        InputArraySchemaLoader,
        strtype,
        array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype,
    )
)
# Type-DSL wrapper ("v1.3" is the version string handed to _TypeDSLLoader).
typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(
    union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype,
    2,
    "v1.3",
)
array_of_InputRecordFieldLoader = _ArrayLoader(InputRecordFieldLoader)
union_of_None_type_or_array_of_InputRecordFieldLoader = _UnionLoader(
    (
        None_type,
        array_of_InputRecordFieldLoader,
    )
)
# idmap loader for input-record `fields` (map shorthand keyed by "name",
# predicate "type").
idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader = _IdMapLoader(
    union_of_None_type_or_array_of_InputRecordFieldLoader, "name", "type"
)
# Auto-generated (Schema Salad codegen, CWL v1.3): URI/typedsl loaders for
# input and output `type` fields, plus the process-level `inputs` idmap
# loader.  Regenerate from the schema; do not hand-edit.
uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2_None = _URILoader(
    union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype,
    False,
    True,
    2,
    None,
)
# Output-parameter `type` expressions: CWL type, Output* schema record,
# string, or an array of any of those.
union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader(
    (
        CWLTypeLoader,
        OutputRecordSchemaLoader,
        OutputEnumSchemaLoader,
        OutputArraySchemaLoader,
        strtype,
    )
)
array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _ArrayLoader(
    union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype
)
union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader(
    (
        CWLTypeLoader,
        OutputRecordSchemaLoader,
        OutputEnumSchemaLoader,
        OutputArraySchemaLoader,
        strtype,
        array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype,
    )
)
typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(
    union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype,
    2,
    "v1.3",
)
array_of_OutputRecordFieldLoader = _ArrayLoader(OutputRecordFieldLoader)
union_of_None_type_or_array_of_OutputRecordFieldLoader = _UnionLoader(
    (
        None_type,
        array_of_OutputRecordFieldLoader,
    )
)
idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader = _IdMapLoader(
    union_of_None_type_or_array_of_OutputRecordFieldLoader, "name", "type"
)
uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2_None = _URILoader(
    union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype,
    False,
    True,
    2,
    None,
)
# `inputs` of any process kind (CommandLineTool / Workflow / Operation); the
# idmap form is keyed by "id" with "type" as the predicate.
union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _UnionLoader(
    (
        CommandInputParameterLoader,
        WorkflowInputParameterLoader,
        OperationInputParameterLoader,
    )
)
array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _ArrayLoader(
    union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader
)
idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _IdMapLoader(
    array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader,
    "id",
    "type",
)
# Auto-generated (Schema Salad codegen, CWL v1.3): process `outputs` and
# `requirements` loaders.  Regenerate from the schema; do not hand-edit.
union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _UnionLoader(
    (
        CommandOutputParameterLoader,
        ExpressionToolOutputParameterLoader,
        WorkflowOutputParameterLoader,
        OperationOutputParameterLoader,
    )
)
array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _ArrayLoader(
    union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader
)
# `outputs` idmap form is keyed by "id" with "type" as the predicate.
idmap_outputs_array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _IdMapLoader(
    array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader,
    "id",
    "type",
)
# Optional list of the 17 concrete requirement records defined by CWL v1.3
# (the element union is defined earlier in this module).
union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader(
    (
        None_type,
        array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader,
    )
)
# `requirements` idmap form is keyed by "class"; note the predicate here is
# the literal string "None" (as generated), not the None object.
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _IdMapLoader(
    union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader,
    "class",
    "None",
)
# Auto-generated (Schema Salad codegen, CWL v1.3): element loaders for
# `hints` — the same 17 requirement records as `requirements`, plus Any_type
# so that unrecognised hints are accepted.  Regenerate; do not hand-edit.
union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader(
    (
        InlineJavascriptRequirementLoader,
        SchemaDefRequirementLoader,
        LoadListingRequirementLoader,
        DockerRequirementLoader,
        SoftwareRequirementLoader,
        InitialWorkDirRequirementLoader,
        EnvVarRequirementLoader,
        ShellCommandRequirementLoader,
        ResourceRequirementLoader,
        WorkReuseLoader,
        NetworkAccessLoader,
        InplaceUpdateRequirementLoader,
        ToolTimeLimitLoader,
        SubworkflowFeatureRequirementLoader,
        ScatterFeatureRequirementLoader,
        MultipleInputFeatureRequirementLoader,
        StepInputExpressionRequirementLoader,
        Any_type,
    )
)
array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _ArrayLoader(
    union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type
)
# `hints` is optional: None or a list of the above.
union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader(
    (
        None_type,
        array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type,
    )
)
+idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _IdMapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + "class", + "None", +) +union_of_None_type_or_CWLVersionLoader = _UnionLoader( + ( + None_type, + CWLVersionLoader, + ) +) +uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None = _URILoader( + union_of_None_type_or_CWLVersionLoader, False, True, None, None +) +union_of_None_type_or_array_of_strtype = _UnionLoader( + ( + None_type, + array_of_strtype, + ) +) +uri_union_of_None_type_or_array_of_strtype_True_False_None_None = _URILoader( + union_of_None_type_or_array_of_strtype, True, False, None, None +) +InlineJavascriptRequirement_classLoader = _EnumLoader( + ("InlineJavascriptRequirement",), "InlineJavascriptRequirement_class" +) +uri_InlineJavascriptRequirement_classLoader_False_True_None_None = _URILoader( + InlineJavascriptRequirement_classLoader, False, True, None, None +) 
+SchemaDefRequirement_classLoader = _EnumLoader( + ("SchemaDefRequirement",), "SchemaDefRequirement_class" +) +uri_SchemaDefRequirement_classLoader_False_True_None_None = _URILoader( + SchemaDefRequirement_classLoader, False, True, None, None +) +union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _UnionLoader( + ( + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + ) +) +array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _ArrayLoader( + union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader +) +union_of_strtype_or_ExpressionLoader = _UnionLoader( + ( + strtype, + ExpressionLoader, + ) +) +union_of_None_type_or_booltype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + booltype, + ExpressionLoader, + ) +) +LoadListingRequirement_classLoader = _EnumLoader( + ("LoadListingRequirement",), "LoadListingRequirement_class" +) +uri_LoadListingRequirement_classLoader_False_True_None_None = _URILoader( + LoadListingRequirement_classLoader, False, True, None, None +) +union_of_None_type_or_inttype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + inttype, + ExpressionLoader, + ) +) +union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype = _UnionLoader( + ( + None_type, + strtype, + ExpressionLoader, + array_of_strtype, + ) +) +union_of_None_type_or_ExpressionLoader = _UnionLoader( + ( + None_type, + ExpressionLoader, + ) +) +union_of_None_type_or_CommandLineBindingLoader = _UnionLoader( + ( + None_type, + CommandLineBindingLoader, + ) +) +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + ) +) 
+array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + 2, + "v1.3", +) +array_of_CommandInputRecordFieldLoader = _ArrayLoader(CommandInputRecordFieldLoader) +union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_CommandInputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" + ) +) 
+uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2_None = _URILoader( + union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + False, + True, + 2, + None, +) +union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + ) +) 
+typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + 2, + "v1.3", +) +union_of_None_type_or_CommandOutputBindingLoader = _UnionLoader( + ( + None_type, + CommandOutputBindingLoader, + ) +) +array_of_CommandOutputRecordFieldLoader = _ArrayLoader(CommandOutputRecordFieldLoader) +union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_CommandOutputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" + ) +) +uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2_None = _URILoader( + union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + False, + True, + 2, + None, +) 
+union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + stdinLoader, + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + 2, + "v1.3", +) +union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + stdoutLoader, + stderrLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + ) +) 
+typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + 2, + "v1.3", +) +CommandLineTool_classLoader = _EnumLoader(("CommandLineTool",), "CommandLineTool_class") +uri_CommandLineTool_classLoader_False_True_None_None = _URILoader( + CommandLineTool_classLoader, False, True, None, None +) +array_of_CommandInputParameterLoader = _ArrayLoader(CommandInputParameterLoader) +idmap_inputs_array_of_CommandInputParameterLoader = _IdMapLoader( + array_of_CommandInputParameterLoader, "id", "type" +) +array_of_CommandOutputParameterLoader = _ArrayLoader(CommandOutputParameterLoader) +idmap_outputs_array_of_CommandOutputParameterLoader = _IdMapLoader( + array_of_CommandOutputParameterLoader, "id", "type" +) +union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( + ( + strtype, + ExpressionLoader, + CommandLineBindingLoader, + ) +) +array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( + _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) +) +union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( + ( + None_type, + array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + ) +) +array_of_inttype = _ArrayLoader(inttype) +union_of_None_type_or_array_of_inttype = _UnionLoader( + ( + None_type, + array_of_inttype, + ) +) 
+DockerRequirement_classLoader = _EnumLoader( + ("DockerRequirement",), "DockerRequirement_class" +) +uri_DockerRequirement_classLoader_False_True_None_None = _URILoader( + DockerRequirement_classLoader, False, True, None, None +) +SoftwareRequirement_classLoader = _EnumLoader( + ("SoftwareRequirement",), "SoftwareRequirement_class" +) +uri_SoftwareRequirement_classLoader_False_True_None_None = _URILoader( + SoftwareRequirement_classLoader, False, True, None, None +) +array_of_SoftwarePackageLoader = _ArrayLoader(SoftwarePackageLoader) +idmap_packages_array_of_SoftwarePackageLoader = _IdMapLoader( + array_of_SoftwarePackageLoader, "package", "specs" +) +uri_union_of_None_type_or_array_of_strtype_False_False_None_True = _URILoader( + union_of_None_type_or_array_of_strtype, False, False, None, True +) +InitialWorkDirRequirement_classLoader = _EnumLoader( + ("InitialWorkDirRequirement",), "InitialWorkDirRequirement_class" +) +uri_InitialWorkDirRequirement_classLoader_False_True_None_None = _URILoader( + InitialWorkDirRequirement_classLoader, False, True, None, None +) +union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + None_type, + DirentLoader, + ExpressionLoader, + FileLoader, + DirectoryLoader, + array_of_union_of_FileLoader_or_DirectoryLoader, + ) +) +array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader( + union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader +) +union_of_ExpressionLoader_or_array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + ExpressionLoader, + 
array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader, + ) +) +EnvVarRequirement_classLoader = _EnumLoader( + ("EnvVarRequirement",), "EnvVarRequirement_class" +) +uri_EnvVarRequirement_classLoader_False_True_None_None = _URILoader( + EnvVarRequirement_classLoader, False, True, None, None +) +array_of_EnvironmentDefLoader = _ArrayLoader(EnvironmentDefLoader) +idmap_envDef_array_of_EnvironmentDefLoader = _IdMapLoader( + array_of_EnvironmentDefLoader, "envName", "envValue" +) +ShellCommandRequirement_classLoader = _EnumLoader( + ("ShellCommandRequirement",), "ShellCommandRequirement_class" +) +uri_ShellCommandRequirement_classLoader_False_True_None_None = _URILoader( + ShellCommandRequirement_classLoader, False, True, None, None +) +ResourceRequirement_classLoader = _EnumLoader( + ("ResourceRequirement",), "ResourceRequirement_class" +) +uri_ResourceRequirement_classLoader_False_True_None_None = _URILoader( + ResourceRequirement_classLoader, False, True, None, None +) +union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + inttype, + floattype, + ExpressionLoader, + ) +) +WorkReuse_classLoader = _EnumLoader(("WorkReuse",), "WorkReuse_class") +uri_WorkReuse_classLoader_False_True_None_None = _URILoader( + WorkReuse_classLoader, False, True, None, None +) +union_of_booltype_or_ExpressionLoader = _UnionLoader( + ( + booltype, + ExpressionLoader, + ) +) +NetworkAccess_classLoader = _EnumLoader(("NetworkAccess",), "NetworkAccess_class") +uri_NetworkAccess_classLoader_False_True_None_None = _URILoader( + NetworkAccess_classLoader, False, True, None, None +) +InplaceUpdateRequirement_classLoader = _EnumLoader( + ("InplaceUpdateRequirement",), "InplaceUpdateRequirement_class" +) +uri_InplaceUpdateRequirement_classLoader_False_True_None_None = _URILoader( + InplaceUpdateRequirement_classLoader, False, True, None, None +) 
+ToolTimeLimit_classLoader = _EnumLoader(("ToolTimeLimit",), "ToolTimeLimit_class") +uri_ToolTimeLimit_classLoader_False_True_None_None = _URILoader( + ToolTimeLimit_classLoader, False, True, None, None +) +union_of_inttype_or_ExpressionLoader = _UnionLoader( + ( + inttype, + ExpressionLoader, + ) +) +union_of_None_type_or_InputBindingLoader = _UnionLoader( + ( + None_type, + InputBindingLoader, + ) +) +ExpressionTool_classLoader = _EnumLoader(("ExpressionTool",), "ExpressionTool_class") +uri_ExpressionTool_classLoader_False_True_None_None = _URILoader( + ExpressionTool_classLoader, False, True, None, None +) +array_of_WorkflowInputParameterLoader = _ArrayLoader(WorkflowInputParameterLoader) +idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader( + array_of_WorkflowInputParameterLoader, "id", "type" +) +array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( + ExpressionToolOutputParameterLoader +) +idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( + array_of_ExpressionToolOutputParameterLoader, "id", "type" +) +union_of_None_type_or_LinkMergeMethodLoader = _UnionLoader( + ( + None_type, + LinkMergeMethodLoader, + ) +) +union_of_None_type_or_PickValueMethodLoader = _UnionLoader( + ( + None_type, + PickValueMethodLoader, + ) +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2_None = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2, None +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1, None +) +union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type = _UnionLoader( + ( + None_type, + FileLoader, + DirectoryLoader, + Any_type, + ) +) +array_of_WorkflowStepInputLoader = _ArrayLoader(WorkflowStepInputLoader) +idmap_in__array_of_WorkflowStepInputLoader = _IdMapLoader( + array_of_WorkflowStepInputLoader, "id", "source" +) 
+union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( + ( + strtype, + WorkflowStepOutputLoader, + ) +) +array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader( + union_of_strtype_or_WorkflowStepOutputLoader +) +union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( + (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) +) +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None_None = _URILoader( + union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, + True, + False, + None, + None, +) +array_of_Any_type = _ArrayLoader(Any_type) +union_of_None_type_or_array_of_Any_type = _UnionLoader( + ( + None_type, + array_of_Any_type, + ) +) +idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( + union_of_None_type_or_array_of_Any_type, "class", "None" +) +union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _UnionLoader( + ( + strtype, + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + OperationLoader, + ) +) +uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_False_False_None_None = _URILoader( + union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + False, + False, + None, + None, +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0_None = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype, False, False, 0, None +) +union_of_None_type_or_ScatterMethodLoader = _UnionLoader( + ( + None_type, + ScatterMethodLoader, + ) +) +uri_union_of_None_type_or_ScatterMethodLoader_False_True_None_None = _URILoader( + union_of_None_type_or_ScatterMethodLoader, False, True, None, None +) +union_of_ExpressionLoader = _UnionLoader((ExpressionLoader,)) +array_of_LoopInputLoader = _ArrayLoader(LoopInputLoader) +union_of_None_type_or_array_of_LoopInputLoader = _UnionLoader( + ( + None_type, 
+ array_of_LoopInputLoader, + ) +) +idmap_loop_union_of_None_type_or_array_of_LoopInputLoader = _IdMapLoader( + union_of_None_type_or_array_of_LoopInputLoader, "id", "outputSource" +) +union_of_None_type_or_LoopOutputMethodLoader = _UnionLoader( + ( + None_type, + LoopOutputMethodLoader, + ) +) +uri_union_of_None_type_or_LoopOutputMethodLoader_False_True_None_None = _URILoader( + union_of_None_type_or_LoopOutputMethodLoader, False, True, None, None +) +Workflow_classLoader = _EnumLoader(("Workflow",), "Workflow_class") +uri_Workflow_classLoader_False_True_None_None = _URILoader( + Workflow_classLoader, False, True, None, None +) +array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) +idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( + array_of_WorkflowOutputParameterLoader, "id", "type" +) +union_of_WorkflowStepLoader_or_ScatterWorkflowStepLoader_or_LoopWorkflowStepLoader = ( + _UnionLoader( + ( + WorkflowStepLoader, + ScatterWorkflowStepLoader, + LoopWorkflowStepLoader, + ) + ) +) +array_of_union_of_WorkflowStepLoader_or_ScatterWorkflowStepLoader_or_LoopWorkflowStepLoader = _ArrayLoader( + union_of_WorkflowStepLoader_or_ScatterWorkflowStepLoader_or_LoopWorkflowStepLoader +) +union_of_array_of_union_of_WorkflowStepLoader_or_ScatterWorkflowStepLoader_or_LoopWorkflowStepLoader = _UnionLoader( + ( + array_of_union_of_WorkflowStepLoader_or_ScatterWorkflowStepLoader_or_LoopWorkflowStepLoader, + ) +) +idmap_steps_union_of_array_of_union_of_WorkflowStepLoader_or_ScatterWorkflowStepLoader_or_LoopWorkflowStepLoader = _IdMapLoader( + union_of_array_of_union_of_WorkflowStepLoader_or_ScatterWorkflowStepLoader_or_LoopWorkflowStepLoader, + "id", + "None", +) +SubworkflowFeatureRequirement_classLoader = _EnumLoader( + ("SubworkflowFeatureRequirement",), "SubworkflowFeatureRequirement_class" +) +uri_SubworkflowFeatureRequirement_classLoader_False_True_None_None = _URILoader( + SubworkflowFeatureRequirement_classLoader, False, True, 
None, None +) +ScatterFeatureRequirement_classLoader = _EnumLoader( + ("ScatterFeatureRequirement",), "ScatterFeatureRequirement_class" +) +uri_ScatterFeatureRequirement_classLoader_False_True_None_None = _URILoader( + ScatterFeatureRequirement_classLoader, False, True, None, None +) +MultipleInputFeatureRequirement_classLoader = _EnumLoader( + ("MultipleInputFeatureRequirement",), "MultipleInputFeatureRequirement_class" +) +uri_MultipleInputFeatureRequirement_classLoader_False_True_None_None = _URILoader( + MultipleInputFeatureRequirement_classLoader, False, True, None, None +) +StepInputExpressionRequirement_classLoader = _EnumLoader( + ("StepInputExpressionRequirement",), "StepInputExpressionRequirement_class" +) +uri_StepInputExpressionRequirement_classLoader_False_True_None_None = _URILoader( + StepInputExpressionRequirement_classLoader, False, True, None, None +) +Operation_classLoader = _EnumLoader(("Operation",), "Operation_class") +uri_Operation_classLoader_False_True_None_None = _URILoader( + Operation_classLoader, False, True, None, None +) +array_of_OperationInputParameterLoader = _ArrayLoader(OperationInputParameterLoader) +idmap_inputs_array_of_OperationInputParameterLoader = _IdMapLoader( + array_of_OperationInputParameterLoader, "id", "type" +) +array_of_OperationOutputParameterLoader = _ArrayLoader(OperationOutputParameterLoader) +idmap_outputs_array_of_OperationOutputParameterLoader = _IdMapLoader( + array_of_OperationOutputParameterLoader, "id", "type" +) +union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _UnionLoader( + ( + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + OperationLoader, + ) +) +array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _ArrayLoader( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader +) 
# NOTE(review): this module is generated by Schema SALAD (see commit message);
# fix the schema / generator rather than hand-editing this file.
# Root loader for a CWL v1.3 document: one of the four process types, or a
# list of them.
union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _UnionLoader(
    (
        CommandLineToolLoader,
        ExpressionToolLoader,
        WorkflowLoader,
        OperationLoader,
        array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader,
    )
)

# Member loaders are attached after all of them have been defined — presumably
# because CWLObjectTypeLoader participates in recursive unions (the array/map
# loaders below reference it by name).
CWLObjectTypeLoader.add_loaders(
    (
        booltype,
        inttype,
        floattype,
        strtype,
        FileLoader,
        DirectoryLoader,
        array_of_union_of_None_type_or_CWLObjectTypeLoader,
        map_of_union_of_None_type_or_CWLObjectTypeLoader,
    )
)


def load_document(
    doc: Any,
    baseuri: Optional[str] = None,
    loadingOptions: Optional[LoadingOptions] = None,
) -> Any:
    """Load a CWL v1.3 document and return the parsed object(s).

    The metadata element of the (result, metadata) pair produced by
    ``_document_load`` is discarded; use ``load_document_with_metadata``
    to keep it.  ``baseuri`` defaults to a file:// URI for the current
    working directory.
    """
    if baseuri is None:
        baseuri = file_uri(os.getcwd()) + "/"
    if loadingOptions is None:
        loadingOptions = LoadingOptions()
    result, metadata = _document_load(
        union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader,
        doc,
        baseuri,
        loadingOptions,
    )
    return result


def load_document_with_metadata(
    doc: Any,
    baseuri: Optional[str] = None,
    loadingOptions: Optional[LoadingOptions] = None,
    addl_metadata_fields: Optional[MutableSequence[str]] = None,
) -> Any:
    """Load a CWL v1.3 document, returning ``_document_load``'s full result.

    Unlike ``load_document`` this returns the (document, metadata) tuple
    unchanged, and forwards ``addl_metadata_fields`` to ``_document_load``.
    """
    if baseuri is None:
        baseuri = file_uri(os.getcwd()) + "/"
    if loadingOptions is None:
        loadingOptions = LoadingOptions(fileuri=baseuri)
    return _document_load(
        union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader,
        doc,
        baseuri,
        loadingOptions,
        addl_metadata_fields=addl_metadata_fields,
    )


def load_document_by_string(
    string: Any,
    uri: str,
    loadingOptions: Optional[LoadingOptions] = None,
) -> Any:
    """Parse ``string`` as YAML and load it as a CWL v1.3 document rooted at ``uri``.

    Metadata from ``_document_load`` is discarded, as in ``load_document``.
    """
    yaml = yaml_no_ts()
    result = yaml.load(string)
    # Attach filename/position provenance (schema_salad.sourceline) so later
    # validation errors can point back at the source text.
    add_lc_filename(result, uri)

    if loadingOptions is None:
        loadingOptions = LoadingOptions(fileuri=uri)

    result, metadata = _document_load(
        union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader,
        result,
        uri,
        loadingOptions,
    )
    return result


def load_document_by_yaml(
    yaml: Any,
    uri: str,
    loadingOptions: Optional[LoadingOptions] = None,
) -> Any:
    """
    Shortcut to load via a YAML object.
    yaml: must be from ruamel.yaml.main.YAML.load with preserve_quotes=True
    """
    add_lc_filename(yaml, uri)

    if loadingOptions is None:
        loadingOptions = LoadingOptions(fileuri=uri)

    result, metadata = _document_load(
        union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader,
        yaml,
        uri,
        loadingOptions,
    )
    return result
diff --git a/cwl_utils/parser/cwl_v1_3_utils.py b/cwl_utils/parser/cwl_v1_3_utils.py
new file mode 100644
index 00000000..7f20bbe6
--- /dev/null
+++ b/cwl_utils/parser/cwl_v1_3_utils.py
@@ -0,0 +1,668 @@
# SPDX-License-Identifier: Apache-2.0
import hashlib
import logging
import os
from collections import namedtuple
from collections.abc import MutableMapping, MutableSequence
from io import StringIO
from typing import IO, Any, Optional, Union, cast
from urllib.parse import urldefrag

from schema_salad.exceptions import ValidationException
from schema_salad.sourceline import SourceLine, add_lc_filename
from schema_salad.utils import aslist, json_dumps, yaml_no_ts

import cwl_utils.parser
import cwl_utils.parser.cwl_v1_3 as cwl
import cwl_utils.parser.utils
from cwl_utils.errors import WorkflowException
from cwl_utils.utils import yaml_dumps

# 64 KiB — presumably a cap on inlined file contents; the use site is outside
# this chunk, TODO confirm against callers.
CONTENT_LIMIT: int = 64 * 1024

_logger
= logging.getLogger("cwl_utils")

# Result record used by check_all_types(): one (source, sink) pairing, the
# linkMerge in effect, and a human-readable diagnostic message.
SrcSink = namedtuple("SrcSink", ["src", "sink", "linkMerge", "message"])


def _compare_records(
    src: cwl.RecordSchema, sink: cwl.RecordSchema, strict: bool = False
) -> bool:
    """
    Compare two records, ensuring they have compatible fields.

    This handles normalizing record names, which will be relative to workflow
    step, so that they can be compared.

    Returns True when every sink field can accept the corresponding source
    field; a field missing on either side is treated as type "null".  On the
    first incompatible field whose sink type is not None, the mismatch is
    logged and False is returned.
    """
    # Map short field name -> declared type for both sides.
    srcfields = {cwl.shortname(field.name): field.type_ for field in (src.fields or {})}
    sinkfields = {
        cwl.shortname(field.name): field.type_ for field in (sink.fields or {})
    }
    for key in sinkfields.keys():
        if (
            not can_assign_src_to_sink(
                srcfields.get(key, "null"), sinkfields.get(key, "null"), strict
            )
            and sinkfields.get(key) is not None
        ):
            _logger.info(
                "Record comparison failure for %s and %s\n"
                "Did not match fields for %s: %s and %s",
                cast(
                    Union[cwl.InputRecordSchema, cwl.CommandOutputRecordSchema], src
                ).name,
                cast(
                    Union[cwl.InputRecordSchema, cwl.CommandOutputRecordSchema], sink
                ).name,
                key,
                srcfields.get(key),
                sinkfields.get(key),
            )
            return False
    return True


def _compare_type(type1: Any, type2: Any) -> bool:
    """Structurally compare two CWL type representations.

    Arrays match when their item types match; records match when they declare
    exactly the same (short) field names with pairwise-matching types; unions
    (plain sequences) match when they have equal length and every member of
    type1 matches some member of type2.  Anything else falls back to ``==``.
    """
    if isinstance(type1, cwl.ArraySchema) and isinstance(type2, cwl.ArraySchema):
        return _compare_type(type1.items, type2.items)
    elif isinstance(type1, cwl.RecordSchema) and isinstance(type2, cwl.RecordSchema):
        fields1 = {
            cwl.shortname(field.name): field.type_ for field in (type1.fields or {})
        }
        fields2 = {
            cwl.shortname(field.name): field.type_ for field in (type2.fields or {})
        }
        if fields1.keys() != fields2.keys():
            return False
        return all(_compare_type(fields1[k], fields2[k]) for k in fields1.keys())
    elif isinstance(type1, MutableSequence) and isinstance(type2, MutableSequence):
        if len(type1) != len(type2):
            return False
        for t1 in type1:
            if not any(_compare_type(t1, t2) for t2 in type2):
                return False
        return True
    else:
        return bool(type1 == type2)


def
_is_all_output_method_loop_step(
    param_to_step: dict[str, cwl.WorkflowStep], parm_id: str
) -> bool:
    """Return True if ``parm_id`` maps to a loop step collecting every iteration.

    That is, the producing step is a LoopWorkflowStep whose outputMethod is
    "all_iterations" (its outputs are therefore arrays — see check_all_types,
    which wraps the source type in an ArraySchema in that case).
    """
    if (source_step := param_to_step.get(parm_id)) is not None:
        if (
            isinstance(source_step, cwl.LoopWorkflowStep)
            and source_step.outputMethod == "all_iterations"
        ):
            return True
    return False


def _is_conditional_step(
    param_to_step: dict[str, cwl.WorkflowStep], parm_id: str
) -> bool:
    """Return True if ``parm_id`` maps to a step guarded by a ``when`` clause."""
    if (source_step := param_to_step.get(parm_id)) is not None:
        if source_step.when is not None:
            return True
    return False


def _inputfile_load(
    doc: Union[str, MutableMapping[str, Any], MutableSequence[Any]],
    baseuri: str,
    loadingOptions: cwl.LoadingOptions,
    addl_metadata_fields: Optional[MutableSequence[str]] = None,
) -> tuple[Any, cwl.LoadingOptions]:
    """Load a CWL input (job) document via ``cwl.CWLInputFileLoader``.

    ``doc`` may be a URI string (fetched, parsed as YAML, then re-entered
    through this function), a mapping, or a sequence.  Results are memoized
    in ``loadingOptions.idx`` keyed by URI.  Returns a
    (document, LoadingOptions) tuple; raises ValidationException for any
    other ``doc`` type.
    """
    loader = cwl.CWLInputFileLoader
    if isinstance(doc, str):
        url = loadingOptions.fetcher.urljoin(baseuri, doc)
        if url in loadingOptions.idx:
            # Already loaded: serve the memoized (document, options) pair.
            return loadingOptions.idx[url]
        doc_url, frg = urldefrag(url)  # fragment is dropped; only doc_url is fetched
        text = loadingOptions.fetcher.fetch_text(doc_url)
        textIO = StringIO(text)
        textIO.name = str(doc_url)
        yaml = yaml_no_ts()
        result = yaml.load(textIO)
        add_lc_filename(result, doc_url)
        loadingOptions = cwl.LoadingOptions(copyfrom=loadingOptions, fileuri=doc_url)
        # Recurse with the parsed structure; the mapping/sequence branches
        # below populate loadingOptions.idx, which is then read back here.
        # NOTE(review): the recursion indexes under doc_url while this lookup
        # uses the full url — confirm a fragment can never reach this path.
        _inputfile_load(
            result,
            doc_url,
            loadingOptions,
        )
        return loadingOptions.idx[url]

    if isinstance(doc, MutableMapping):
        # Copy through any caller-requested top-level metadata fields that
        # are present in the document.
        addl_metadata = {}
        if addl_metadata_fields is not None:
            for mf in addl_metadata_fields:
                if mf in doc:
                    addl_metadata[mf] = doc[mf]

        loadingOptions = cwl.LoadingOptions(
            copyfrom=loadingOptions,
            baseuri=baseuri,
            addl_metadata=addl_metadata,
        )

        loadingOptions.idx[baseuri] = (
            loader.load(doc, baseuri, loadingOptions, docRoot=baseuri),
            loadingOptions,
        )

        return loadingOptions.idx[baseuri]

    if isinstance(doc, MutableSequence):
        loadingOptions.idx[baseuri] = (
            loader.load(doc, baseuri, loadingOptions),
            loadingOptions,
        )
        return loadingOptions.idx[baseuri]

    raise ValidationException(
        "Expected URI string, MutableMapping or MutableSequence, got %s" % type(doc)
    )


def can_assign_src_to_sink(src: Any, sink: Any, strict: bool = False) -> bool:
    """
    Check for identical type specifications, ignoring extra keys like inputBinding.

    src: admissible source types
    sink: admissible sink types

    In non-strict comparison, at least one source type must match one sink type,
    except for 'null'.
    In strict comparison, all source types must match at least one sink type.
    """
    if src == "Any" or sink == "Any":
        return True
    if isinstance(src, cwl.ArraySchema) and isinstance(sink, cwl.ArraySchema):
        # Arrays are assignable when their item types are.
        return can_assign_src_to_sink(src.items, sink.items, strict)
    if isinstance(src, cwl.RecordSchema) and isinstance(sink, cwl.RecordSchema):
        return _compare_records(src, sink, strict)
    if isinstance(src, MutableSequence):
        if strict:
            # Strict: every admissible source type must be acceptable.
            for this_src in src:
                if not can_assign_src_to_sink(this_src, sink):
                    return False
            return True
        # Non-strict: one matching non-"null" source type is enough.
        for this_src in src:
            if this_src != "null" and can_assign_src_to_sink(this_src, sink):
                return True
        return False
    if isinstance(sink, MutableSequence):
        for this_sink in sink:
            if can_assign_src_to_sink(src, this_sink):
                return True
        return False
    return bool(src == sink)


def check_all_types(
    src_dict: dict[str, Any],
    sinks: MutableSequence[Union[cwl.WorkflowStepInput, cwl.WorkflowOutputParameter]],
    param_to_step: dict[str, cwl.WorkflowStep],
    type_dict: dict[str, Any],
) -> dict[str, list[SrcSink]]:
    """Given a list of sinks, check if their types match with the types of their sources."""
    validation: dict[str, list[SrcSink]] = {"warning": [], "exception": []}
    for sink in sinks:
        extra_message = (
            "pickValue is %s" % sink.pickValue if sink.pickValue is not None else None
        )
        # Workflow outputs are wired via `outputSource`; step inputs via `source`.
        if isinstance(sink, cwl.WorkflowOutputParameter):
            sourceName = "outputSource"
            sourceField = sink.outputSource
        elif isinstance(sink, 
cwl.WorkflowStepInput): + sourceName = "source" + sourceField = sink.source + else: + continue + if sourceField is not None: + if isinstance(sourceField, MutableSequence): + linkMerge = sink.linkMerge or ( + "merge_nested" if len(sourceField) > 1 else None + ) + if sink.pickValue in ["first_non_null", "the_only_non_null"]: + linkMerge = None + srcs_of_sink = [] + for parm_id in sourceField: + srcs_of_sink += [src_dict[parm_id]] + if ( + _is_conditional_step(param_to_step, parm_id) + and sink.pickValue is not None + ): + validation["warning"].append( + SrcSink( + src_dict[parm_id], + sink, + linkMerge, + message="Source is from conditional step, but pickValue is not used", + ) + ) + if _is_all_output_method_loop_step(param_to_step, parm_id): + src_typ = type_dict[src_dict[parm_id].id] + type_dict[src_dict[parm_id].id] = cwl.ArraySchema( + items=src_typ, type_="array" + ) + else: + parm_id = cast(str, sourceField) + if parm_id not in src_dict: + raise SourceLine(sink, sourceName, ValidationException).makeError( + f"{sourceName} not found: {parm_id}" + ) + srcs_of_sink = [src_dict[parm_id]] + linkMerge = None + if sink.pickValue is not None: + validation["warning"].append( + SrcSink( + src_dict[parm_id], + sink, + linkMerge, + message="pickValue is used but only a single input source is declared", + ) + ) + if _is_conditional_step(param_to_step, parm_id): + src_typ = aslist(type_dict[src_dict[parm_id].id]) + snk_typ = type_dict[sink.id] + if "null" not in src_typ: + src_typ = ["null"] + cast(list[Any], src_typ) + if ( + not isinstance(snk_typ, MutableSequence) + or "null" not in snk_typ + ): + validation["warning"].append( + SrcSink( + src_dict[parm_id], + sink, + linkMerge, + message="Source is from conditional step and may produce `null`", + ) + ) + type_dict[src_dict[parm_id].id] = src_typ + if _is_all_output_method_loop_step(param_to_step, parm_id): + src_typ = type_dict[src_dict[parm_id].id] + type_dict[src_dict[parm_id].id] = cwl.ArraySchema( + items=src_typ, 
type_="array" + ) + for src in srcs_of_sink: + check_result = check_types( + type_dict[cast(str, src.id)], + type_dict[sink.id], + linkMerge, + getattr(sink, "valueFrom", None), + ) + if check_result == "warning": + validation["warning"].append( + SrcSink(src, sink, linkMerge, extra_message) + ) + elif check_result == "exception": + validation["exception"].append( + SrcSink(src, sink, linkMerge, extra_message) + ) + return validation + + +def check_types( + srctype: Any, + sinktype: Any, + linkMerge: Optional[str], + valueFrom: Optional[str] = None, +) -> str: + """ + Check if the source and sink types are correct. + + Acceptable types are "pass", "warning", or "exception". + """ + if valueFrom is not None: + return "pass" + if linkMerge is None: + if can_assign_src_to_sink(srctype, sinktype, strict=True): + return "pass" + if can_assign_src_to_sink(srctype, sinktype, strict=False): + return "warning" + return "exception" + if linkMerge == "merge_nested": + return check_types( + cwl.ArraySchema(items=srctype, type_="array"), sinktype, None, None + ) + if linkMerge == "merge_flattened": + return check_types(merge_flatten_type(srctype), sinktype, None, None) + raise ValidationException(f"Invalid value {linkMerge} for linkMerge field.") + + +def content_limit_respected_read_bytes(f: IO[bytes]) -> bytes: + """ + Read file content up to 64 kB as a byte array. + + Throw exception for larger files (see https://www.commonwl.org/v1.2/Workflow.html#Changelog). + """ + contents = f.read(CONTENT_LIMIT + 1) + if len(contents) > CONTENT_LIMIT: + raise WorkflowException( + "file is too large, loadContents limited to %d bytes" % CONTENT_LIMIT + ) + return contents + + +def content_limit_respected_read(f: IO[bytes]) -> str: + """ + Read file content up to 64 kB as an utf-8 encoded string. + + Throw exception for larger files (see https://www.commonwl.org/v1.2/Workflow.html#Changelog). 
+ """ + return content_limit_respected_read_bytes(f).decode("utf-8") + + +def convert_stdstreams_to_files(clt: cwl.CommandLineTool) -> None: + """Convert stdin, stdout and stderr type shortcuts to files.""" + for out in clt.outputs: + if out.type_ == "stdout": + if out.outputBinding is not None: + raise ValidationException( + "Not allowed to specify outputBinding when using stdout shortcut." + ) + if clt.stdout is None: + clt.stdout = str( + hashlib.sha1( # nosec + json_dumps(clt.save(), sort_keys=True).encode("utf-8") + ).hexdigest() + ) + out.type_ = "File" + out.outputBinding = cwl.CommandOutputBinding(glob=clt.stdout) + elif out.type_ == "stderr": + if out.outputBinding is not None: + raise ValidationException( + "Not allowed to specify outputBinding when using stderr shortcut." + ) + if clt.stderr is None: + clt.stderr = str( + hashlib.sha1( # nosec + json_dumps(clt.save(), sort_keys=True).encode("utf-8") + ).hexdigest() + ) + out.type_ = "File" + out.outputBinding = cwl.CommandOutputBinding(glob=clt.stderr) + for inp in clt.inputs: + if inp.type_ == "stdin": + if inp.inputBinding is not None: + raise ValidationException( + "Not allowed to specify unputBinding when using stdin shortcut." + ) + if clt.stdin is not None: + raise ValidationException( + "Not allowed to specify stdin path when using stdin type shortcut." 
+                )
+            else:
+                clt.stdin = (
+                    "$(inputs.%s.path)"
+                    % cast(str, inp.id).rpartition("#")[2].split("/")[-1]
+                )
+                inp.type_ = "File"
+
+
+def load_inputfile(
+    doc: Any,
+    baseuri: Optional[str] = None,
+    loadingOptions: Optional[cwl.LoadingOptions] = None,
+) -> Any:
+    """Load a CWL v1.3 input file from a serialized YAML string or a YAML object."""
+    if baseuri is None:
+        baseuri = cwl.file_uri(os.getcwd()) + "/"
+    if loadingOptions is None:
+        loadingOptions = cwl.LoadingOptions()
+
+    result, metadata = _inputfile_load(
+        doc,
+        baseuri,
+        loadingOptions,
+    )
+    return result
+
+
+def load_inputfile_by_string(
+    string: Any,
+    uri: str,
+    loadingOptions: Optional[cwl.LoadingOptions] = None,
+) -> Any:
+    """Load a CWL v1.3 input file from a serialized YAML string."""
+    yaml = yaml_no_ts()
+    result = yaml.load(string)
+    add_lc_filename(result, uri)
+
+    if loadingOptions is None:
+        loadingOptions = cwl.LoadingOptions(fileuri=uri)
+
+    result, metadata = _inputfile_load(
+        result,
+        uri,
+        loadingOptions,
+    )
+    return result
+
+
+def load_inputfile_by_yaml(
+    yaml: Any,
+    uri: str,
+    loadingOptions: Optional[cwl.LoadingOptions] = None,
+) -> Any:
+    """Load a CWL v1.3 input file from a YAML object."""
+    add_lc_filename(yaml, uri)
+
+    if loadingOptions is None:
+        loadingOptions = cwl.LoadingOptions(fileuri=uri)
+
+    result, metadata = _inputfile_load(
+        yaml,
+        uri,
+        loadingOptions,
+    )
+    return result
+
+
+def merge_flatten_type(src: Any) -> Any:
+    """Return the merge flattened type of the source type."""
+    if isinstance(src, MutableSequence):
+        return [merge_flatten_type(t) for t in src]
+    if isinstance(src, cwl.ArraySchema):
+        return src
+    return cwl.ArraySchema(type_="array", items=src)
+
+
+def type_for_step_input(
+    step: cwl.WorkflowStep,
+    in_: cwl.WorkflowStepInput,
+) -> Any:
+    """Determine the type for the given step input."""
+    if in_.valueFrom is not None:
+        return "Any"
+    step_run = cwl_utils.parser.utils.load_step(step)
+    
cwl_utils.parser.utils.convert_stdstreams_to_files(step_run) + if step_run and step_run.inputs: + for step_input in step_run.inputs: + if cast(str, step_input.id).split("#")[-1] == in_.id.split("#")[-1]: + input_type = step_input.type_ + if isinstance(step, cwl.ScatterWorkflowStep) and in_.id in aslist( + step.scatter + ): + input_type = cwl.ArraySchema(items=input_type, type_="array") + return input_type + return "Any" + + +def type_for_step_output( + step: cwl.WorkflowStep, + sourcename: str, +) -> Any: + """Determine the type for the given step output.""" + step_run = cwl_utils.parser.utils.load_step(step) + cwl_utils.parser.utils.convert_stdstreams_to_files(step_run) + if step_run and step_run.outputs: + for output in step_run.outputs: + if ( + output.id.split("#")[-1].split("/")[-1] + == sourcename.split("#")[-1].split("/")[-1] + ): + output_type = output.type_ + if isinstance(step, cwl.ScatterWorkflowStep): + if step.scatterMethod == "nested_crossproduct": + for _ in range(len(aslist(step.scatter))): + output_type = cwl.ArraySchema( + items=output_type, type_="array" + ) + else: + output_type = cwl.ArraySchema(items=output_type, type_="array") + return output_type + raise ValidationException( + "param {} not found in {}.".format( + sourcename, + yaml_dumps(cwl.save(step)), + ) + ) + + +def type_for_source( + process: Union[cwl.CommandLineTool, cwl.Workflow, cwl.ExpressionTool], + sourcenames: Union[str, list[str]], + parent: Optional[cwl.Workflow] = None, + linkMerge: Optional[str] = None, + pickValue: Optional[str] = None, +) -> Any: + """Determine the type for the given sourcenames.""" + scatter_context: list[Optional[tuple[int, str]]] = [] + params = param_for_source_id(process, sourcenames, parent, scatter_context) + if not isinstance(params, list): + new_type = params.type_ + if scatter_context[0] is not None: + if scatter_context[0][1] == "nested_crossproduct": + for _ in range(scatter_context[0][0]): + new_type = cwl.ArraySchema(items=new_type, 
type_="array") + else: + new_type = cwl.ArraySchema(items=new_type, type_="array") + if linkMerge == "merge_nested": + new_type = cwl.ArraySchema(items=new_type, type_="array") + elif linkMerge == "merge_flattened": + new_type = merge_flatten_type(new_type) + if pickValue is not None: + if isinstance(new_type, cwl.ArraySchema): + if pickValue in ["first_non_null", "the_only_non_null"]: + new_type = new_type.items + return new_type + new_type = [] + for p, sc in zip(params, scatter_context): + if isinstance(p, str) and not any(_compare_type(t, p) for t in new_type): + cur_type = p + elif hasattr(p, "type_") and not any( + _compare_type(t, p.type_) for t in new_type + ): + cur_type = p.type_ + else: + cur_type = None + if cur_type is not None: + if sc is not None: + if sc[1] == "nested_crossproduct": + for _ in range(sc[0]): + cur_type = cwl.ArraySchema(items=cur_type, type_="array") + else: + cur_type = cwl.ArraySchema(items=cur_type, type_="array") + new_type.append(cur_type) + if len(new_type) == 1: + new_type = new_type[0] + if linkMerge == "merge_nested": + new_type = cwl.ArraySchema(items=new_type, type_="array") + elif linkMerge == "merge_flattened": + new_type = merge_flatten_type(new_type) + elif isinstance(sourcenames, list) and len(sourcenames) > 1: + new_type = cwl.ArraySchema(items=new_type, type_="array") + if pickValue is not None: + if isinstance(new_type, cwl.ArraySchema): + if pickValue in ["first_non_null", "the_only_non_null"]: + new_type = new_type.items + return new_type + + +def param_for_source_id( + process: Union[cwl.CommandLineTool, cwl.Workflow, cwl.ExpressionTool], + sourcenames: Union[str, list[str]], + parent: Optional[cwl.Workflow] = None, + scatter_context: Optional[list[Optional[tuple[int, str]]]] = None, +) -> Union[list[cwl.WorkflowInputParameter], cwl.WorkflowInputParameter]: + """Find the process input parameter that matches one of the given sourcenames.""" + if isinstance(sourcenames, str): + sourcenames = [sourcenames] + 
params: list[cwl.WorkflowInputParameter] = [] + for sourcename in sourcenames: + if not isinstance(process, cwl.Workflow): + for param in process.inputs: + if param.id.split("#")[-1] == sourcename.split("#")[-1]: + params.append(param) + if scatter_context is not None: + scatter_context.append(None) + targets = [process] + if parent: + targets.append(parent) + for target in targets: + if isinstance(target, cwl.Workflow): + for inp in target.inputs: + if inp.id.split("#")[-1] == sourcename.split("#")[-1]: + params.append(inp) + if scatter_context is not None: + scatter_context.append(None) + for step in target.steps: + if ( + "/".join(sourcename.split("#")[-1].split("/")[:-1]) + == step.id.split("#")[-1] + and step.out + ): + step_run = cwl_utils.parser.utils.load_step(step) + cwl_utils.parser.utils.convert_stdstreams_to_files(step_run) + for outp in step.out: + outp_id = outp if isinstance(outp, str) else outp.id + if ( + outp_id.split("#")[-1].split("/")[-1] + == sourcename.split("#")[-1].split("/")[-1] + ): + if step_run and step_run.outputs: + for output in step_run.outputs: + if ( + output.id.split("#")[-1].split("/")[-1] + == sourcename.split("#")[-1].split("/")[-1] + ): + params.append(output) + if scatter_context is not None: + if isinstance( + step, cwl.ScatterWorkflowStep + ): + if isinstance(step.scatter, str): + scatter_context.append( + ( + 1, + step.scatterMethod + or "dotproduct", + ) + ) + elif isinstance( + step.scatter, MutableSequence + ): + scatter_context.append( + ( + len(step.scatter), + step.scatterMethod + or "dotproduct", + ) + ) + else: + scatter_context.append(None) + if len(params) == 1: + return params[0] + elif len(params) > 1: + return params + raise WorkflowException( + "param {} not found in {}\n{}.".format( + sourcename, + yaml_dumps(cwl.save(process)), + (f" or\n {yaml_dumps(cwl.save(parent))}" if parent is not None else ""), + ) + ) diff --git a/cwl_utils/parser/utils.py b/cwl_utils/parser/utils.py index 9d5b60f2..e67dba3e 100644 
--- a/cwl_utils/parser/utils.py +++ b/cwl_utils/parser/utils.py @@ -28,6 +28,8 @@ cwl_v1_1_utils, cwl_v1_2, cwl_v1_2_utils, + cwl_v1_3, + cwl_v1_3_utils, ) _logger = logging.getLogger("cwl_utils") @@ -41,6 +43,8 @@ def convert_stdstreams_to_files(process: Process) -> None: cwl_v1_1_utils.convert_stdstreams_to_files(process) elif isinstance(process, cwl_v1_2.CommandLineTool): cwl_v1_2_utils.convert_stdstreams_to_files(process) + elif isinstance(process, cwl_v1_3.CommandLineTool): + cwl_v1_3_utils.convert_stdstreams_to_files(process) def load_inputfile_by_uri( @@ -70,6 +74,8 @@ def load_inputfile_by_uri( loadingOptions = cwl_v1_1.LoadingOptions(fileuri=baseuri) elif version == "v1.2": loadingOptions = cwl_v1_2.LoadingOptions(fileuri=baseuri) + elif version == "v1.3.0-dev1": + loadingOptions = cwl_v1_3.LoadingOptions(fileuri=baseuri) else: raise ValidationException( f"Version error. Did not recognise {version} as a CWL version" @@ -124,6 +130,10 @@ def load_inputfile_by_yaml( result = cwl_v1_2_utils.load_inputfile_by_yaml( yaml, uri, cast(Optional[cwl_v1_2.LoadingOptions], loadingOptions) ) + elif version == "v1.3.0-dev1": + result = cwl_v1_3_utils.load_inputfile_by_yaml( + yaml, uri, cast(Optional[cwl_v1_3.LoadingOptions], loadingOptions) + ) elif version is None: raise ValidationException("could not get the cwlVersion") else: @@ -185,6 +195,11 @@ def static_checker(workflow: cwl_utils.parser.Workflow) -> None: cwl_v1_2.WorkflowStepOutput(s) if isinstance(s, str) else s for s in step.out ] + elif workflow.cwlVersion == "v1.3.0-dev1": + step_outs = [ + cwl_v1_3.WorkflowStepOutput(s) if isinstance(s, str) else s + for s in step.out + ] else: raise Exception(f"Unsupported CWL version {workflow.cwlVersion}") step_outputs.extend(step_outs) @@ -232,6 +247,14 @@ def static_checker(workflow: cwl_utils.parser.Workflow) -> None: workflow_outputs_val = cwl_v1_2_utils.check_all_types( src_dict, workflow.outputs, param_to_step, type_dict ) + elif workflow.cwlVersion == 
"v1.3.0-dev1": + parser = cwl_v1_3 + step_inputs_val = cwl_v1_3_utils.check_all_types( + src_dict, step_inputs, param_to_step, type_dict + ) + workflow_outputs_val = cwl_v1_3_utils.check_all_types( + src_dict, workflow.outputs, param_to_step, type_dict + ) else: raise Exception(f"Unsupported CWL version {workflow.cwlVersion}") @@ -378,6 +401,21 @@ def type_for_source( linkMerge, pickValue, ) + elif process.cwlVersion == "v1.3.0-dev1": + return cwl_v1_3_utils.type_for_source( + cast( + Union[ + cwl_v1_3.CommandLineTool, + cwl_v1_3.Workflow, + cwl_v1_3.ExpressionTool, + ], + process, + ), + sourcenames, + cast(Optional[cwl_v1_3.Workflow], parent), + linkMerge, + pickValue, + ) elif process.cwlVersion is None: raise ValidationException("could not get the cwlVersion") else: @@ -402,6 +440,10 @@ def type_for_step_input( return cwl_v1_2_utils.type_for_step_input( cast(cwl_v1_2.WorkflowStep, step), cast(cwl_v1_2.WorkflowStepInput, in_) ) + elif cwlVersion == "v1.3.0-dev1": + return cwl_v1_3_utils.type_for_step_input( + cast(cwl_v1_3.WorkflowStep, step), cast(cwl_v1_3.WorkflowStepInput, in_) + ) def type_for_step_output(step: WorkflowStep, sourcename: str, cwlVersion: str) -> Any: @@ -418,6 +460,10 @@ def type_for_step_output(step: WorkflowStep, sourcename: str, cwlVersion: str) - return cwl_v1_2_utils.type_for_step_output( cast(cwl_v1_2.WorkflowStep, step), sourcename ) + elif cwlVersion == "v1.3.0-dev1": + return cwl_v1_3_utils.type_for_step_output( + cast(cwl_v1_3.WorkflowStep, step), sourcename + ) def param_for_source_id( @@ -442,6 +488,10 @@ def param_for_source_id( list[cwl_utils.parser.cwl_v1_2.WorkflowInputParameter], cwl_utils.parser.cwl_v1_2.WorkflowInputParameter, ], + Union[ + list[cwl_utils.parser.cwl_v1_3.WorkflowInputParameter], + cwl_utils.parser.cwl_v1_3.WorkflowInputParameter, + ], ]: if process.cwlVersion == "v1.0": return cwl_utils.parser.cwl_v1_0_utils.param_for_source_id( @@ -485,6 +535,20 @@ def param_for_source_id( 
cast(cwl_utils.parser.cwl_v1_2.Workflow, parent), scatter_context, ) + elif process.cwlVersion == "v1.3.0-dev1": + return cwl_utils.parser.cwl_v1_3_utils.param_for_source_id( + cast( + Union[ + cwl_utils.parser.cwl_v1_3.CommandLineTool, + cwl_utils.parser.cwl_v1_3.Workflow, + cwl_utils.parser.cwl_v1_3.ExpressionTool, + ], + process, + ), + sourcenames, + cast(cwl_utils.parser.cwl_v1_3.Workflow, parent), + scatter_context, + ) elif process.cwlVersion is None: raise ValidationException("could not get the cwlVersion") else: diff --git a/testdata/all-output-loop_v1_3.cwl b/testdata/all-output-loop_v1_3.cwl new file mode 100755 index 00000000..80eb5952 --- /dev/null +++ b/testdata/all-output-loop_v1_3.cwl @@ -0,0 +1,28 @@ +#!/usr/bin/env cwl-runner +cwlVersion: v1.3.0-dev1 +class: Workflow +requirements: + InlineJavascriptRequirement: {} +inputs: + i1: int +outputs: + o1: + type: int[] + outputSource: subworkflow/o1 +steps: + subworkflow: + when: $(inputs.i1 < 10) + loop: + i1: o1 + outputMethod: all_iterations + run: + class: ExpressionTool + inputs: + i1: int + outputs: + o1: int + expression: > + ${return {'o1': inputs.i1 + 1};} + in: + i1: i1 + out: [o1] \ No newline at end of file diff --git a/testdata/cond-single-source-wf-003.1.cwl b/testdata/cond-single-source-wf-003.1.v1_2.cwl similarity index 92% rename from testdata/cond-single-source-wf-003.1.cwl rename to testdata/cond-single-source-wf-003.1.v1_2.cwl index 78e780d9..2e5f4269 100755 --- a/testdata/cond-single-source-wf-003.1.cwl +++ b/testdata/cond-single-source-wf-003.1.v1_2.cwl @@ -10,7 +10,7 @@ steps: in: in1: val a_new_var: val - run: foo-array.cwl + run: foo-array.v1_2.cwl when: $(inputs.in1 < 1) out: [out1] diff --git a/testdata/cond-single-source-wf-003.1.v1_3.cwl b/testdata/cond-single-source-wf-003.1.v1_3.cwl new file mode 100755 index 00000000..93e0b7cd --- /dev/null +++ b/testdata/cond-single-source-wf-003.1.v1_3.cwl @@ -0,0 +1,24 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: 
v1.3.0-dev1 +inputs: + val: int + +steps: + + step1: + in: + in1: val + a_new_var: val + run: foo-array.v1_3.cwl + when: $(inputs.in1 < 1) + out: [out1] + +outputs: + out1: + type: string + outputSource: step1/out1 + pickValue: first_non_null + +requirements: + InlineJavascriptRequirement: {} diff --git a/testdata/cond-single-source-wf-004.1.cwl b/testdata/cond-single-source-wf-004.1.v1_2.cwl similarity index 92% rename from testdata/cond-single-source-wf-004.1.cwl rename to testdata/cond-single-source-wf-004.1.v1_2.cwl index 0d48f276..2a8b7f55 100755 --- a/testdata/cond-single-source-wf-004.1.cwl +++ b/testdata/cond-single-source-wf-004.1.v1_2.cwl @@ -10,7 +10,7 @@ steps: in: in1: val a_new_var: val - run: foo-array.cwl + run: foo-array.v1_2.cwl when: $(inputs.in1 < 1) out: [out1] diff --git a/testdata/cond-single-source-wf-004.1.v1_3.cwl b/testdata/cond-single-source-wf-004.1.v1_3.cwl new file mode 100755 index 00000000..35baf94a --- /dev/null +++ b/testdata/cond-single-source-wf-004.1.v1_3.cwl @@ -0,0 +1,24 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.3.0-dev1 +inputs: + val: int + +steps: + + step1: + in: + in1: val + a_new_var: val + run: foo-array.v1_3.cwl + when: $(inputs.in1 < 1) + out: [out1] + +outputs: + out1: + type: string + outputSource: step1/out1 + pickValue: the_only_non_null + +requirements: + InlineJavascriptRequirement: {} diff --git a/testdata/cond-single-source-wf-005.1.cwl b/testdata/cond-single-source-wf-005.1.v1_2.cwl similarity index 92% rename from testdata/cond-single-source-wf-005.1.cwl rename to testdata/cond-single-source-wf-005.1.v1_2.cwl index ab9bd75c..813a98f5 100755 --- a/testdata/cond-single-source-wf-005.1.cwl +++ b/testdata/cond-single-source-wf-005.1.v1_2.cwl @@ -10,7 +10,7 @@ steps: in: in1: val a_new_var: val - run: foo-array.cwl + run: foo-array.v1_2.cwl when: $(inputs.in1 < 1) out: [out1] diff --git a/testdata/cond-single-source-wf-005.1.v1_3.cwl b/testdata/cond-single-source-wf-005.1.v1_3.cwl new file 
mode 100755 index 00000000..4bf78f8d --- /dev/null +++ b/testdata/cond-single-source-wf-005.1.v1_3.cwl @@ -0,0 +1,24 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.3.0-dev1 +inputs: + val: int + +steps: + + step1: + in: + in1: val + a_new_var: val + run: foo-array.v1_3.cwl + when: $(inputs.in1 < 1) + out: [out1] + +outputs: + out1: + type: string[] + outputSource: step1/out1 + pickValue: all_non_null + +requirements: + InlineJavascriptRequirement: {} diff --git a/testdata/cond-wf-003.1.cwl b/testdata/cond-wf-003.1.v1_2.cwl similarity index 100% rename from testdata/cond-wf-003.1.cwl rename to testdata/cond-wf-003.1.v1_2.cwl diff --git a/testdata/cond-wf-003.1.v1_3.cwl b/testdata/cond-wf-003.1.v1_3.cwl new file mode 100755 index 00000000..42031a86 --- /dev/null +++ b/testdata/cond-wf-003.1.v1_3.cwl @@ -0,0 +1,35 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.3.0-dev1 +inputs: + val: int + +steps: + + step1: + in: + in1: val + a_new_var: val + run: foo.cwl + when: $(inputs.in1 < 1) + out: [out1] + + step2: + in: + in1: val + a_new_var: val + run: foo.cwl + when: $(inputs.a_new_var > 2) + out: [out1] + +outputs: + out1: + type: string + outputSource: + - step1/out1 + - step2/out1 + pickValue: first_non_null + +requirements: + InlineJavascriptRequirement: {} + MultipleInputFeatureRequirement: {} diff --git a/testdata/cond-wf-004.1.cwl b/testdata/cond-wf-004.1.v1_2.cwl similarity index 100% rename from testdata/cond-wf-004.1.cwl rename to testdata/cond-wf-004.1.v1_2.cwl diff --git a/testdata/cond-wf-004.1.v1_3.cwl b/testdata/cond-wf-004.1.v1_3.cwl new file mode 100755 index 00000000..435886b2 --- /dev/null +++ b/testdata/cond-wf-004.1.v1_3.cwl @@ -0,0 +1,35 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.3.0-dev1 +inputs: + val: int + +steps: + + step1: + in: + in1: val + a_new_var: val + run: foo.cwl + when: $(inputs.in1 < 1) + out: [out1] + + step2: + in: + in1: val + a_new_var: val + run: foo.cwl + when: 
$(inputs.a_new_var > 2) + out: [out1] + +outputs: + out1: + type: string + outputSource: + - step1/out1 + - step2/out1 + pickValue: the_only_non_null + +requirements: + InlineJavascriptRequirement: {} + MultipleInputFeatureRequirement: {} diff --git a/testdata/cond-wf-005.1.cwl b/testdata/cond-wf-005.1.v1_2.cwl similarity index 100% rename from testdata/cond-wf-005.1.cwl rename to testdata/cond-wf-005.1.v1_2.cwl diff --git a/testdata/cond-wf-005.1.v1_3.cwl b/testdata/cond-wf-005.1.v1_3.cwl new file mode 100755 index 00000000..45c91b2c --- /dev/null +++ b/testdata/cond-wf-005.1.v1_3.cwl @@ -0,0 +1,35 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.3.0-dev1 +inputs: + val: int + +steps: + + step1: + in: + in1: val + a_new_var: val + run: foo.cwl + when: $(inputs.in1 < 1) + out: [out1] + + step2: + in: + in1: val + a_new_var: val + run: foo.cwl + when: $(inputs.a_new_var > 2) + out: [out1] + +outputs: + out1: + type: string[] + outputSource: + - step1/out1 + - step2/out1 + pickValue: all_non_null + +requirements: + InlineJavascriptRequirement: {} + MultipleInputFeatureRequirement: {} diff --git a/testdata/count-lines6-single-source-wf_v1_3.cwl b/testdata/count-lines6-single-source-wf_v1_3.cwl new file mode 100755 index 00000000..b6ed4e21 --- /dev/null +++ b/testdata/count-lines6-single-source-wf_v1_3.cwl @@ -0,0 +1,24 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.3.0-dev1 + +requirements: + - class: ScatterFeatureRequirement + +inputs: + file1: File[] + +outputs: + count_output: + type: int + outputSource: step1/output + +steps: + step1: + run: wc3-tool_v1_3.cwl + scatter: file1 + in: + file1: + source: file1 + linkMerge: merge_nested + out: [output] \ No newline at end of file diff --git a/testdata/count-lines6-wf_v1_3.cwl b/testdata/count-lines6-wf_v1_3.cwl new file mode 100755 index 00000000..2533bbb1 --- /dev/null +++ b/testdata/count-lines6-wf_v1_3.cwl @@ -0,0 +1,26 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: 
v1.3.0-dev1 + +requirements: + - class: ScatterFeatureRequirement + - class: MultipleInputFeatureRequirement + +inputs: + file1: File[] + file2: File[] + +outputs: + count_output: + type: int + outputSource: step1/output + +steps: + step1: + run: wc3-tool_v1_3.cwl + scatter: file1 + in: + file1: + source: [file1, file2] + linkMerge: merge_nested + out: [output] \ No newline at end of file diff --git a/testdata/count-lines7-single-source-wf_v1_3.cwl b/testdata/count-lines7-single-source-wf_v1_3.cwl new file mode 100755 index 00000000..8097cb76 --- /dev/null +++ b/testdata/count-lines7-single-source-wf_v1_3.cwl @@ -0,0 +1,20 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.3.0-dev1 + +inputs: + file1: File[] + +outputs: + count_output: + type: int + outputSource: step1/output + +steps: + step1: + run: wc3-tool_v1_3.cwl + in: + file1: + source: file1 + linkMerge: merge_flattened + out: [output] \ No newline at end of file diff --git a/testdata/count-lines7-wf_v1_3.cwl b/testdata/count-lines7-wf_v1_3.cwl new file mode 100755 index 00000000..32dab91b --- /dev/null +++ b/testdata/count-lines7-wf_v1_3.cwl @@ -0,0 +1,24 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.3.0-dev1 + +requirements: + - class: MultipleInputFeatureRequirement + +inputs: + file1: File[] + file2: File[] + +outputs: + count_output: + type: int + outputSource: step1/output + +steps: + step1: + run: wc3-tool_v1_3.cwl + in: + file1: + source: [file1, file2] + linkMerge: merge_flattened + out: [output] \ No newline at end of file diff --git a/testdata/echo_v1_3.cwl b/testdata/echo_v1_3.cwl new file mode 100755 index 00000000..69b20365 --- /dev/null +++ b/testdata/echo_v1_3.cwl @@ -0,0 +1,14 @@ +#!/usr/bin/env cwl-runner +cwlVersion: v1.3.0-dev1 +class: CommandLineTool +baseCommand: echo +inputs: + echo_in: + type: + - string + - string[] + inputBinding: {} +stdout: out.txt +outputs: + txt: + type: stdout diff --git a/testdata/foo-array.cwl b/testdata/foo-array.v1_2.cwl 
similarity index 100% rename from testdata/foo-array.cwl rename to testdata/foo-array.v1_2.cwl diff --git a/testdata/foo-array.v1_3.cwl b/testdata/foo-array.v1_3.cwl new file mode 100755 index 00000000..5b32f548 --- /dev/null +++ b/testdata/foo-array.v1_3.cwl @@ -0,0 +1,13 @@ +#!/usr/bin/env cwl-runner +class: CommandLineTool +cwlVersion: v1.3.0-dev1 +inputs: + in1: int +baseCommand: [echo] +outputs: + out1: + type: string[] + outputBinding: + outputEval: ${ return [foo, inputs.in1]; } +requirements: + InlineJavascriptRequirement: {} diff --git a/testdata/formattest2.cwl b/testdata/formattest2_v1_2.cwl similarity index 100% rename from testdata/formattest2.cwl rename to testdata/formattest2_v1_2.cwl diff --git a/testdata/formattest2_v1_3.cwl b/testdata/formattest2_v1_3.cwl new file mode 100755 index 00000000..a65de658 --- /dev/null +++ b/testdata/formattest2_v1_3.cwl @@ -0,0 +1,27 @@ +#!/usr/bin/env cwl-runner +$namespaces: + edam: http://edamontology.org/ +$schemas: + - EDAM_subset.owl +class: CommandLineTool +cwlVersion: v1.3.0-dev1 +doc: "Reverse each line using the `rev` command" +hints: + DockerRequirement: + dockerPull: docker.io/debian:stable-slim + +inputs: + input: + type: File + inputBinding: {} + format: edam:format_2330 + +outputs: + output: + type: File + outputBinding: + glob: output.txt + format: $(inputs.input.format) + +baseCommand: rev +stdout: output.txt diff --git a/testdata/map-ordering-v1_3.cwl b/testdata/map-ordering-v1_3.cwl new file mode 100755 index 00000000..2a8e85ef --- /dev/null +++ b/testdata/map-ordering-v1_3.cwl @@ -0,0 +1,38 @@ +#!/usr/bin/env cwl-runner +cwlVersion: v1.3.0-dev1 +class: Workflow +inputs: + 09first_input: string + 05second_input: int + 01third_input: File +steps: + zz_step_one: + run: + class: ExpressionTool + inputs: [] + outputs: [] + expression: ${return {}; } + requirements: + InlineJavascriptRequirement: {} + in: [] + out: [] + 00_step_two: + out: [] + run: + inputs: [] + requirements: + 
InlineJavascriptRequirement: {} + outputs: [] + expression: ${return {}; } + class: ExpressionTool + in: [] +outputs: + zz_first_output: + type: File + outputSource: 01third_input + ll_second_output: + type: string + outputSource: 09first_input + aa_third_output: + type: int + outputSource: 05second_input diff --git a/testdata/record-output-wf_v1_3.cwl b/testdata/record-output-wf_v1_3.cwl new file mode 100755 index 00000000..cdd80deb --- /dev/null +++ b/testdata/record-output-wf_v1_3.cwl @@ -0,0 +1,33 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.3.0-dev1 + +inputs: + irec: + type: + name: irec + type: record + fields: + - name: ifoo + type: File + - name: ibar + type: File + +outputs: + orec: + type: + name: orec + type: record + fields: + - name: ofoo + type: File + - name: obar + type: File + outputSource: step1/orec + +steps: + step1: + run: record-output_v1_3.cwl + in: + irec: irec + out: [orec] \ No newline at end of file diff --git a/testdata/record-output_v1_3.cwl b/testdata/record-output_v1_3.cwl new file mode 100755 index 00000000..6654095f --- /dev/null +++ b/testdata/record-output_v1_3.cwl @@ -0,0 +1,39 @@ +#!/usr/bin/env cwl-runner +class: CommandLineTool +cwlVersion: v1.3.0-dev1 +requirements: + - class: ShellCommandRequirement +inputs: + irec: + type: + name: irec + type: record + fields: + - name: ifoo + type: File + inputBinding: + position: 2 + - name: ibar + type: File + inputBinding: + position: 6 +outputs: + orec: + type: + name: orec + type: record + fields: + - name: ofoo + type: File + outputBinding: + glob: foo + - name: obar + type: File + outputBinding: + glob: bar +arguments: + - {valueFrom: "cat", position: 1} + - {valueFrom: "> foo", position: 3, shellQuote: false} + - {valueFrom: "&&", position: 4, shellQuote: false} + - {valueFrom: "cat", position: 5} + - {valueFrom: "> bar", position: 7, shellQuote: false} diff --git a/testdata/scatter-wf1_v1_3.cwl b/testdata/scatter-wf1_v1_3.cwl new file mode 100755 index 
00000000..3527722f --- /dev/null +++ b/testdata/scatter-wf1_v1_3.cwl @@ -0,0 +1,37 @@ +#!/usr/bin/env cwl-runner +cwlVersion: v1.3.0-dev1 +class: Workflow +inputs: + inp: string[] +outputs: + out: + type: string[] + outputSource: step1/echo_out + +requirements: + - class: ScatterFeatureRequirement + +steps: + step1: + in: + echo_in: inp + out: [echo_out] + scatter: echo_in + run: + class: CommandLineTool + inputs: + echo_in: + type: string + inputBinding: {} + outputs: + echo_out: + type: string + outputBinding: + glob: "step1_out" + loadContents: true + outputEval: $(self[0].contents) + baseCommand: "echo" + arguments: + - "-n" + - "foo" + stdout: "step1_out" \ No newline at end of file diff --git a/testdata/scatter-wf2_v1_3.cwl b/testdata/scatter-wf2_v1_3.cwl new file mode 100755 index 00000000..522caf85 --- /dev/null +++ b/testdata/scatter-wf2_v1_3.cwl @@ -0,0 +1,51 @@ +#!/usr/bin/env cwl-runner +cwlVersion: v1.3.0-dev1 +class: Workflow + +inputs: + inp1: string[] + inp2: string[] + +outputs: + out: + outputSource: step1/echo_out + type: + type: array + items: + type: array + items: string + +requirements: + - class: ScatterFeatureRequirement + +steps: + step1: + in: + echo_in1: inp1 + echo_in2: inp2 + out: [echo_out] + + scatter: [echo_in1, echo_in2] + scatterMethod: nested_crossproduct + run: + class: CommandLineTool + id: step1command + inputs: + echo_in1: + type: string + inputBinding: {} + echo_in2: + type: string + inputBinding: {} + outputs: + echo_out: + type: string + outputBinding: + glob: "step1_out" + loadContents: true + outputEval: $(self[0].contents) + baseCommand: "echo" + arguments: + - "-n" + - "foo" + stdout: step1_out \ No newline at end of file diff --git a/testdata/scatter-wf3_v1_3.cwl b/testdata/scatter-wf3_v1_3.cwl new file mode 100755 index 00000000..e188487d --- /dev/null +++ b/testdata/scatter-wf3_v1_3.cwl @@ -0,0 +1,48 @@ +#!/usr/bin/env cwl-runner + +cwlVersion: v1.3.0-dev1 +$graph: + +- id: echo + class: CommandLineTool + inputs: + 
echo_in1: + type: string + inputBinding: {} + echo_in2: + type: string + inputBinding: {} + outputs: + echo_out: + type: string + outputBinding: + glob: "step1_out" + loadContents: true + outputEval: $(self[0].contents) + baseCommand: "echo" + arguments: ["-n", "foo"] + stdout: step1_out + +- id: main + class: Workflow + inputs: + inp1: string[] + inp2: string[] + requirements: + - class: ScatterFeatureRequirement + steps: + step1: + scatter: [echo_in1, echo_in2] + scatterMethod: flat_crossproduct + in: + echo_in1: inp1 + echo_in2: inp2 + out: [echo_out] + run: "#echo" + + outputs: + out: + outputSource: step1/echo_out + type: + type: array + items: string \ No newline at end of file diff --git a/testdata/single-var-loop_v1_3.cwl b/testdata/single-var-loop_v1_3.cwl new file mode 100755 index 00000000..e812e26e --- /dev/null +++ b/testdata/single-var-loop_v1_3.cwl @@ -0,0 +1,28 @@ +#!/usr/bin/env cwl-runner +cwlVersion: v1.3.0-dev1 +class: Workflow +requirements: + InlineJavascriptRequirement: {} +inputs: + i1: int +outputs: + o1: + type: int + outputSource: subworkflow/o1 +steps: + subworkflow: + when: $(inputs.i1 < 10) + loop: + i1: o1 + outputMethod: last_iteration + run: + class: ExpressionTool + inputs: + i1: int + outputs: + o1: int + expression: > + ${return {'o1': inputs.i1 + 1};} + in: + i1: i1 + out: [o1] \ No newline at end of file diff --git a/testdata/stdout-wf_v1_3.cwl b/testdata/stdout-wf_v1_3.cwl new file mode 100755 index 00000000..767beec3 --- /dev/null +++ b/testdata/stdout-wf_v1_3.cwl @@ -0,0 +1,20 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.3.0-dev1 +inputs: + letters0: + type: string + default: "a0" + +outputs: + all: + type: File + outputSource: echo_w/txt + +steps: + echo_w: + run: echo_v1_3.cwl + in: + echo_in: letters0 + out: [txt] + diff --git a/testdata/step-valuefrom2-wf_v1_3.cwl b/testdata/step-valuefrom2-wf_v1_3.cwl new file mode 100755 index 00000000..4401f039 --- /dev/null +++ b/testdata/step-valuefrom2-wf_v1_3.cwl 
@@ -0,0 +1,41 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.3.0-dev1 +requirements: + - class: StepInputExpressionRequirement + - class: InlineJavascriptRequirement + - class: MultipleInputFeatureRequirement + +inputs: + a: int + b: int + +outputs: + val: + type: string + outputSource: step1/echo_out + +steps: + step1: + run: + id: echo + class: CommandLineTool + inputs: + c: + type: int + inputBinding: {} + outputs: + echo_out: + type: string + outputBinding: + glob: "step1_out" + loadContents: true + outputEval: $(self[0].contents) + baseCommand: "echo" + stdout: step1_out + + in: + c: + source: [a, b] + valueFrom: "$(self[0] + self[1])" + out: [echo_out] diff --git a/testdata/step-valuefrom3-wf_v1_3.cwl b/testdata/step-valuefrom3-wf_v1_3.cwl new file mode 100755 index 00000000..722cd52f --- /dev/null +++ b/testdata/step-valuefrom3-wf_v1_3.cwl @@ -0,0 +1,41 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.3.0-dev1 +requirements: + - class: StepInputExpressionRequirement + - class: InlineJavascriptRequirement + +inputs: + a: int + b: int + +outputs: + val: + type: string + outputSource: step1/echo_out + +steps: + step1: + run: + id: echo + class: CommandLineTool + inputs: + c: + type: int + inputBinding: {} + outputs: + echo_out: + type: string + outputBinding: + glob: "step1_out" + loadContents: true + outputEval: $(self[0].contents) + baseCommand: "echo" + stdout: step1_out + + in: + a: a + b: b + c: + valueFrom: "$(inputs.a + inputs.b)" + out: [echo_out] diff --git a/testdata/step_valuefrom5_wf_v1_3.cwl b/testdata/step_valuefrom5_wf_v1_3.cwl new file mode 100755 index 00000000..7c4e12fb --- /dev/null +++ b/testdata/step_valuefrom5_wf_v1_3.cwl @@ -0,0 +1,74 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.3.0-dev1 +requirements: + StepInputExpressionRequirement: {} + +inputs: + file1: File + +outputs: + val1: + type: string + outputSource: step1/echo_out + val2: + type: string + outputSource: step2/echo_out + +steps: 
+ step1: + run: + class: CommandLineTool + + inputs: + name: + type: string + inputBinding: {} + + outputs: + echo_out: + type: string + outputBinding: + glob: step1_out + loadContents: true + outputEval: $(self[0].contents) + echo_out_file: + type: File + outputBinding: + glob: step1_out + + baseCommand: echo + stdout: step1_out + + in: + name: + source: file1 + valueFrom: $(self.basename) + out: [echo_out, echo_out_file] + + + step2: + run: + class: CommandLineTool + + inputs: + name: + type: string + inputBinding: {} + + outputs: + echo_out: + type: string + outputBinding: + glob: step1_out + loadContents: true + outputEval: $(self[0].contents) + + baseCommand: echo + stdout: step1_out + + in: + name: + source: step1/echo_out_file + valueFrom: $(self.basename) + out: [echo_out] diff --git a/testdata/step_valuefrom5_wf_with_id_v1_3.cwl b/testdata/step_valuefrom5_wf_with_id_v1_3.cwl new file mode 100755 index 00000000..e3a2b5e5 --- /dev/null +++ b/testdata/step_valuefrom5_wf_with_id_v1_3.cwl @@ -0,0 +1,75 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.3.0-dev1 +requirements: + StepInputExpressionRequirement: {} + +inputs: + file1: File + +outputs: + val1: + type: string + outputSource: step1/echo_out + val2: + type: string + outputSource: step2/echo_out + +steps: + step1: + run: + id: explicit/id + class: CommandLineTool + + inputs: + name: + type: string + inputBinding: {} + + outputs: + echo_out: + type: string + outputBinding: + glob: step1_out + loadContents: true + outputEval: $(self[0].contents) + echo_out_file: + type: File + outputBinding: + glob: step1_out + + baseCommand: echo + stdout: step1_out + + in: + name: + source: file1 + valueFrom: $(self.basename) + out: [echo_out, echo_out_file] + + + step2: + run: + class: CommandLineTool + + inputs: + name: + type: string + inputBinding: {} + + outputs: + echo_out: + type: string + outputBinding: + glob: step1_out + loadContents: true + outputEval: $(self[0].contents) + + baseCommand: echo + 
stdout: step1_out + + in: + name: + source: step1/echo_out_file + valueFrom: $(self.basename) + out: [echo_out] diff --git a/testdata/wc3-tool_v1_3.cwl b/testdata/wc3-tool_v1_3.cwl new file mode 100755 index 00000000..388826a8 --- /dev/null +++ b/testdata/wc3-tool_v1_3.cwl @@ -0,0 +1,24 @@ +#!/usr/bin/env cwl-runner +class: CommandLineTool +cwlVersion: v1.3.0-dev1 + +requirements: + - class: InlineJavascriptRequirement + +inputs: + file1: + type: File[] + inputBinding: {} +outputs: + output: + type: int + outputBinding: + glob: output.txt + loadContents: true + outputEval: | + ${ + var s = self[0].contents.split(/\r?\n/); + return parseInt(s[s.length-2]); + } +stdout: output.txt +baseCommand: wc \ No newline at end of file diff --git a/testdata/wf2.cwl b/testdata/wf2.v1_2.cwl similarity index 100% rename from testdata/wf2.cwl rename to testdata/wf2.v1_2.cwl diff --git a/testdata/wf2.v1_3.cwl b/testdata/wf2.v1_3.cwl new file mode 100755 index 00000000..a986555c --- /dev/null +++ b/testdata/wf2.v1_3.cwl @@ -0,0 +1,30 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.3.0-dev1 +inputs: + in1: types/testtypes.yml#my_boolean_array + in2: + type: types/testtypes.yml#my_enum + +steps: + s1: + run: remote-cwl/wf1.cwl + in: + - id: in1 + source: "#in1" + out: [out1] + s2: + run: remote-cwl/tool1.cwl + in: + in1: s1/out1 + out: [out1] + +outputs: + out1: + type: string + outputSource: "#s2/out1" + +requirements: + SchemaDefRequirement: + types: + - $import: types/testtypes.yml diff --git a/testdata/workflow_input_format_expr_v1_3.cwl b/testdata/workflow_input_format_expr_v1_3.cwl new file mode 100755 index 00000000..1e34adf0 --- /dev/null +++ b/testdata/workflow_input_format_expr_v1_3.cwl @@ -0,0 +1,36 @@ +#!/usr/bin/env cwl-runner +cwlVersion: v1.3.0-dev1 +class: Workflow + +requirements: + InlineJavascriptRequirement: {} + +inputs: + first: + type: File + format: | + ${ return "http://edamontology.org/format_3016"; } + default: + class: File + basename: default 
+ format: http://edamontology.org/format_3016 + contents: "42" + +steps: + format_extract: + in: + target: first + run: + class: ExpressionTool + inputs: + target: File + expression: | + ${ return {"result": inputs.target.format}; } + outputs: + result: string + out: [ result ] + +outputs: + format_uri: + type: string + outputSource: format_extract/result diff --git a/testdata/workflow_input_sf_expr_array_v1_3.cwl b/testdata/workflow_input_sf_expr_array_v1_3.cwl new file mode 100755 index 00000000..81ae13f1 --- /dev/null +++ b/testdata/workflow_input_sf_expr_array_v1_3.cwl @@ -0,0 +1,40 @@ +#!/usr/bin/env cwl-runner +cwlVersion: v1.3.0-dev1 +class: Workflow + +requirements: + InlineJavascriptRequirement: {} + +inputs: + first: + type: File + secondaryFiles: + - | + ${ return self.basename+".2"; } + default: + class: File + basename: default.txt + contents: "42" + secondaryFiles: + - class: File + basename: default.txt.2 + contents: "23" + +steps: + sf_extract: + in: + target: first + run: + class: ExpressionTool + inputs: + target: File + expression: | + ${ return {"result": inputs.target.secondaryFiles[0].basename}; } + outputs: + result: string + out: [ result ] + +outputs: + sf_name: + type: string + outputSource: sf_extract/result diff --git a/testdata/workflow_input_sf_expr_v1_3.cwl b/testdata/workflow_input_sf_expr_v1_3.cwl new file mode 100755 index 00000000..d903a91a --- /dev/null +++ b/testdata/workflow_input_sf_expr_v1_3.cwl @@ -0,0 +1,39 @@ +#!/usr/bin/env cwl-runner +cwlVersion: v1.3.0-dev1 +class: Workflow + +requirements: + InlineJavascriptRequirement: {} + +inputs: + first: + type: File + secondaryFiles: | + ${ return self.basename+".2"; } + default: + class: File + basename: default.txt + contents: "42" + secondaryFiles: + - class: File + basename: default.txt.2 + contents: "23" + +steps: + sf_extract: + in: + target: first + run: + class: ExpressionTool + inputs: + target: File + expression: | + ${ return {"result": 
inputs.target.secondaryFiles[0].basename}; } + outputs: + result: string + out: [ result ] + +outputs: + sf_name: + type: string + outputSource: sf_extract/result diff --git a/tests/test_etools_to_clt.py b/tests/test_etools_to_clt.py index 9a2da6e9..de291183 100644 --- a/tests/test_etools_to_clt.py +++ b/tests/test_etools_to_clt.py @@ -16,9 +16,11 @@ import cwl_utils.parser.cwl_v1_0 as parser import cwl_utils.parser.cwl_v1_1 as parser1 import cwl_utils.parser.cwl_v1_2 as parser2 +import cwl_utils.parser.cwl_v1_3 as parser3 from cwl_utils.cwl_v1_0_expression_refactor import traverse as traverse0 from cwl_utils.cwl_v1_1_expression_refactor import traverse as traverse1 from cwl_utils.cwl_v1_2_expression_refactor import traverse as traverse2 +from cwl_utils.cwl_v1_3_expression_refactor import traverse as traverse3 from cwl_utils.errors import WorkflowException from cwl_utils.expression_refactor import run as expression_refactor @@ -144,6 +146,46 @@ def test_v1_2_workflow_top_level_sf_expr_array() -> None: ) +def test_v1_3_workflow_top_level_format_expr() -> None: + """Test for the correct error when converting a format expression in a workflow level input.""" + with raises(WorkflowException, match=r".*format specification.*"): + result, modified = traverse3( + parser3.load_document( + get_data("testdata/workflow_input_format_expr_v1_3.cwl") + ), + False, + False, + False, + False, + ) + + +def test_v1_3_workflow_top_level_sf_expr() -> None: + """Test for the correct error when converting a secondaryFiles expression in a workflow level input.""" + with raises(WorkflowException, match=r".*secondaryFiles.*"): + result, modified = traverse3( + parser3.load_document(get_data("testdata/workflow_input_sf_expr_v1_3.cwl")), + False, + False, + False, + False, + ) + + +def test_v1_3_workflow_top_level_sf_expr_array() -> None: + """Test for the correct error when converting a secondaryFiles expression (array form) in a workflow level input.""" # noqa: B950 + with 
raises(WorkflowException, match=r".*secondaryFiles.*"): + result, modified = traverse3( + parser3.load_document( + get_data("testdata/workflow_input_sf_expr_array_v1_3.cwl") + ), + False, + False, + False, + False, + ) + + def test_v1_0_step_valuefrom_expr_multisource() -> None: """Convert a valueFrom expression that has multiple sources.""" result, modified = traverse0( @@ -177,6 +219,17 @@ def test_v1_2_step_valuefrom_expr_multisource() -> None: ) +def test_v1_3_step_valuefrom_expr_multisource() -> None: + """Convert a valueFrom expression that has multiple sources.""" + result, modified = traverse3( + parser3.load_document(get_data("testdata/step-valuefrom2-wf_v1_3.cwl")), + False, + False, + False, + False, + ) + + def test_v1_0_step_valuefrom_expr_sibling_inputs() -> None: """Convert a valueFrom expression from a step input that has uninvolved sibling inputs.""" result, modified = traverse0( @@ -210,10 +263,32 @@ def test_v1_2_step_valuefrom_expr_sibling_inputs() -> None: ) +def test_v1_3_step_valuefrom_expr_sibling_inputs() -> None: + """Convert a valueFrom expression from a step input that has uninvolved sibling inputs.""" + result, modified = traverse3( + parser3.load_document(get_data("testdata/step-valuefrom3-wf_v1_3.cwl")), + False, + False, + False, + False, + ) + + def test_v1_2_workflow_output_pickvalue_expr() -> None: """Convert a workflow output pickValue expression.""" result, modified = traverse2( - parser2.load_document(get_data("testdata/cond-wf-003.1.cwl")), + parser2.load_document(get_data("testdata/cond-wf-003.1.v1_2.cwl")), + False, + False, + False, + False, + ) + + +def test_v1_3_workflow_output_pickvalue_expr() -> None: + """Convert a workflow output pickValue expression.""" + result, modified = traverse3( + parser3.load_document(get_data("testdata/cond-wf-003.1.v1_3.cwl")), False, False, False, @@ -223,7 +298,7 @@ def test_v1_2_workflow_output_pickvalue_expr() -> None: def test_expression_refactor(tmp_path: Path) -> None: """Functional 
test.""" - input_path = get_data("testdata/cond-wf-003.1.cwl") + input_path = get_data("testdata/cond-wf-003.1.v1_2.cwl") result = expression_refactor([str(tmp_path), input_path]) assert result == 0 diff --git a/tests/test_format.py b/tests/test_format.py index c0320d07..ef98eea2 100644 --- a/tests/test_format.py +++ b/tests/test_format.py @@ -152,7 +152,7 @@ def test_loading_options_graph_property_v1_1() -> None: def test_loading_options_graph_property_v1_2() -> None: """Test that RDFLib Graph representations of $schema properties are correctly loaded, CWL v1.2.""" - uri = get_path("testdata/formattest2.cwl").as_uri() + uri = get_path("testdata/formattest2_v1_2.cwl").as_uri() cwl_obj = load_document_by_uri(uri) assert to_isomorphic(cwl_obj.loadingOptions.graph) == to_isomorphic(EDAM) diff --git a/tests/test_packing.py b/tests/test_packing.py index 611a420e..4c09801a 100644 --- a/tests/test_packing.py +++ b/tests/test_packing.py @@ -15,7 +15,7 @@ def test_port_normalization() -> None: step_in1 = _find(step_s1.get("in"), "id", "in1") assert step_in1["source"] == "in1" - cwl = pack(get_data("testdata/wf2.cwl")) + cwl = pack(get_data("testdata/wf2.v1_2.cwl")) step_s1 = _find(cwl.get("steps", []), "id", "s1") step_in1 = _find(step_s1.get("in"), "id", "in1") assert step_in1["source"] == "in1" @@ -46,7 +46,7 @@ def test_schema_def1() -> None: def test_schema_def2() -> None: - cwl = pack(get_data("testdata/wf2.cwl")) + cwl = pack(get_data("testdata/wf2.v1_2.cwl")) _type = _find(cwl.get("inputs", []), "id", "in2").get("type") assert isinstance(_type, dict) assert _type.get("type") == "enum" diff --git a/tests/test_parser.py b/tests/test_parser.py index 31ebb07c..09d89e8f 100644 --- a/tests/test_parser.py +++ b/tests/test_parser.py @@ -1,6 +1,7 @@ # SPDX-License-Identifier: Apache-2.0 """Test the load and save functions for CWL.""" +import pytest from pytest import raises from ruamel.yaml.main import YAML @@ -122,37 +123,10 @@ def test_graph_load_all() -> None: assert 
len(cwl_objs) == 2 -def test_map_ordering_v1_0() -> None: - """Confirm that ID map entries are not sorted during parsing, CWL v1.0.""" - uri = get_path("testdata/map-ordering-v1_0.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert cwl_obj.inputs[0].id == f"{uri}#09first_input" - assert cwl_obj.inputs[1].id == f"{uri}#05second_input" - assert cwl_obj.inputs[2].id == f"{uri}#01third_input" - assert cwl_obj.steps[0].id == f"{uri}#zz_step_one" - assert cwl_obj.steps[1].id == f"{uri}#00_step_two" - assert cwl_obj.outputs[0].id == f"{uri}#zz_first_output" - assert cwl_obj.outputs[1].id == f"{uri}#ll_second_output" - assert cwl_obj.outputs[2].id == f"{uri}#aa_third_output" - - -def test_map_ordering_v1_1() -> None: - """Confirm that ID map entries are not sorted during parsing, CWL v1.1.""" - uri = get_path("testdata/map-ordering-v1_1.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert cwl_obj.inputs[0].id == f"{uri}#09first_input" - assert cwl_obj.inputs[1].id == f"{uri}#05second_input" - assert cwl_obj.inputs[2].id == f"{uri}#01third_input" - assert cwl_obj.steps[0].id == f"{uri}#zz_step_one" - assert cwl_obj.steps[1].id == f"{uri}#00_step_two" - assert cwl_obj.outputs[0].id == f"{uri}#zz_first_output" - assert cwl_obj.outputs[1].id == f"{uri}#ll_second_output" - assert cwl_obj.outputs[2].id == f"{uri}#aa_third_output" - - -def test_map_ordering_v1_2() -> None: - """Confirm that ID map entries are not sorted during parsing, CWL v1.2.""" - uri = get_path("testdata/map-ordering-v1_2.cwl").as_uri() +@pytest.mark.parametrize("cwlVersion", ["v1_0", "v1_1", "v1_2", "v1_3"]) +def test_map_ordering(cwlVersion: str) -> None: + """Confirm that ID map entries are not sorted during parsing.""" + uri = get_path(f"testdata/map-ordering-{cwlVersion}.cwl").as_uri() cwl_obj = load_document_by_uri(uri) assert cwl_obj.inputs[0].id == f"{uri}#09first_input" assert cwl_obj.inputs[1].id == f"{uri}#05second_input" diff --git a/tests/test_parser_utils.py 
b/tests/test_parser_utils.py index d4d24fa7..40e35b48 100644 --- a/tests/test_parser_utils.py +++ b/tests/test_parser_utils.py @@ -14,6 +14,8 @@ import cwl_utils.parser.cwl_v1_1_utils import cwl_utils.parser.cwl_v1_2 import cwl_utils.parser.cwl_v1_2_utils +import cwl_utils.parser.cwl_v1_3 +import cwl_utils.parser.cwl_v1_3_utils import cwl_utils.parser.utils from cwl_utils.errors import WorkflowException from cwl_utils.parser import load_document_by_uri @@ -21,7 +23,7 @@ from .util import get_path -@pytest.mark.parametrize("cwlVersion", ["v1_0", "v1_1", "v1_2"]) +@pytest.mark.parametrize("cwlVersion", ["v1_0", "v1_1", "v1_2", "v1_3"]) def test_static_checker_fail(cwlVersion: str) -> None: """Confirm that static type checker raises expected exception.""" if cwlVersion == "v1_0": @@ -59,19 +61,25 @@ def test_static_checker_fail(cwlVersion: str) -> None: ): cwl_utils.parser.utils.static_checker(cwl_obj5) - if cwlVersion == "v1_2": - uri6 = get_path("testdata/cond-single-source-wf-003.1.cwl").as_uri() + if cwlVersion in ["v1_2", "v1_3"]: + uri6 = get_path( + f"testdata/cond-single-source-wf-003.1.{cwlVersion}.cwl" + ).as_uri() cwl_obj6 = load_document_by_uri(uri6) with pytest.raises(ValidationException, match=".* pickValue is first_non_null"): cwl_utils.parser.utils.static_checker(cwl_obj6) - uri7 = get_path("testdata/cond-single-source-wf-004.1.cwl").as_uri() - cwl_obj7 = load_document_by_uri(uri7) - with pytest.raises(ValidationException, match=".* pickValue is the_only_non_null"): - cwl_utils.parser.utils.static_checker(cwl_obj7) + uri7 = get_path( + f"testdata/cond-single-source-wf-004.1.{cwlVersion}.cwl" + ).as_uri() + cwl_obj7 = load_document_by_uri(uri7) + with pytest.raises( + ValidationException, match=".* pickValue is the_only_non_null" + ): + cwl_utils.parser.utils.static_checker(cwl_obj7) -@pytest.mark.parametrize("cwlVersion", ["v1_0", "v1_1", "v1_2"]) +@pytest.mark.parametrize("cwlVersion", ["v1_0", "v1_1", "v1_2", "v1_3"]) def 
test_static_checker_success(cwlVersion: str) -> None: """Confirm that static type checker correctly validates workflows.""" test_files = [ @@ -85,16 +93,28 @@ def test_static_checker_success(cwlVersion: str) -> None: f"testdata/count-lines7-wf_{cwlVersion}.cwl", f"testdata/count-lines7-single-source-wf_{cwlVersion}.cwl", ] + if cwlVersion in ["v1_2", "v1_3"]: + test_files.extend( + [ + f"testdata/cond-wf-003.1.{cwlVersion}.cwl", + f"testdata/cond-wf-004.1.{cwlVersion}.cwl", + f"testdata/cond-wf-005.1.{cwlVersion}.cwl", + f"testdata/cond-single-source-wf-005.1.{cwlVersion}.cwl", + f"testdata/wf2.{cwlVersion}.cwl", + ] + ) if cwlVersion == "v1_2": test_files.extend( [ - "testdata/cond-wf-003.1.cwl", - "testdata/cond-wf-004.1.cwl", - "testdata/cond-wf-005.1.cwl", - "testdata/cond-single-source-wf-005.1.cwl", "testdata/extensions/all-output-loop_v1_2.cwl", "testdata/extensions/single-var-loop_v1_2.cwl", - "testdata/wf2.cwl", + ] + ) + if cwlVersion == "v1_3": + test_files.extend( + [ + "testdata/all-output-loop_v1_3.cwl", + "testdata/single-var-loop_v1_3.cwl", ] ) for test_file in test_files: @@ -772,7 +792,7 @@ def test_v1_2_file_content_larger_than_64_kB() -> None: def test_v_1_2_load_inputfile_with_format() -> None: """Test that File object with the `format` field is correctly loaded with CWL v1.2.""" - cwl_uri = get_path("testdata/formattest2.cwl").as_uri() + cwl_uri = get_path("testdata/formattest2_v1_2.cwl").as_uri() cwl_obj = load_document_by_uri(cwl_uri) job_uri = get_path("testdata/formattest-job.json").as_uri() jobfile = cwl_utils.parser.utils.load_inputfile_by_uri( @@ -1102,7 +1122,7 @@ def test_v1_2_type_for_source_with_single_entry_merge_flattened() -> None: def test_v1_2_type_for_source_with_multiple_entries_first_non_null() -> None: """Test that the type is correctly inferred from a list of source ids and first_non_null with CWL v1.2.""" - uri = get_path("testdata/cond-wf-003.1.cwl").as_uri() + uri = get_path("testdata/cond-wf-003.1.v1_2.cwl").as_uri() 
cwl_obj = load_document_by_uri(uri) source_type = cwl_utils.parser.utils.type_for_source( process=cwl_obj, @@ -1114,7 +1134,7 @@ def test_v1_2_type_for_source_with_multiple_entries_first_non_null() -> None: def test_v1_2_type_for_source_with_multiple_entries_the_only_non_null() -> None: """Test that the type is correctly inferred from a list of source ids and the_only_non_null with CWL v1.2.""" # noqa: B950 - uri = get_path("testdata/cond-wf-004.1.cwl").as_uri() + uri = get_path("testdata/cond-wf-004.1.v1_2.cwl").as_uri() cwl_obj = load_document_by_uri(uri) source_type = cwl_utils.parser.utils.type_for_source( process=cwl_obj, @@ -1126,7 +1146,7 @@ def test_v1_2_type_for_source_with_multiple_entries_the_only_non_null() -> None: def test_v1_2_type_for_source_with_multiple_entries_all_non_null() -> None: """Test that the type is correctly inferred from a list of source ids and all_non_null with CWL v1.2.""" - uri = get_path("testdata/cond-wf-005.1.cwl").as_uri() + uri = get_path("testdata/cond-wf-005.1.v1_2.cwl").as_uri() cwl_obj = load_document_by_uri(uri) source_type = cwl_utils.parser.utils.type_for_source( process=cwl_obj, @@ -1139,7 +1159,7 @@ def test_v1_2_type_for_source_with_multiple_entries_all_non_null() -> None: def test_v1_2_type_for_source_with_single_entry_first_non_null() -> None: """Test that the type is correctly inferred from a single source id and first_non_null with CWL v1.2.""" - uri = get_path("testdata/cond-single-source-wf-003.1.cwl").as_uri() + uri = get_path("testdata/cond-single-source-wf-003.1.v1_2.cwl").as_uri() cwl_obj = load_document_by_uri(uri) source_type = cwl_utils.parser.utils.type_for_source( process=cwl_obj, @@ -1151,7 +1171,7 @@ def test_v1_2_type_for_source_with_single_entry_first_non_null() -> None: def test_v1_2_type_for_source_with_single_entry_the_only_non_null() -> None: """Test that the type is correctly inferred from a single source id and the_only_non_null with CWL v1.2.""" # noqa: B950 - uri = 
get_path("testdata/cond-single-source-wf-004.1.cwl").as_uri() + uri = get_path("testdata/cond-single-source-wf-004.1.v1_2.cwl").as_uri() cwl_obj = load_document_by_uri(uri) source_type = cwl_utils.parser.utils.type_for_source( process=cwl_obj, @@ -1163,7 +1183,7 @@ def test_v1_2_type_for_source_with_single_entry_the_only_non_null() -> None: def test_v1_2_type_for_source_with_single_entry_all_non_null() -> None: """Test that the type is correctly inferred from a single source id and all_non_null with CWL v1.2.""" - uri = get_path("testdata/cond-single-source-wf-005.1.cwl").as_uri() + uri = get_path("testdata/cond-single-source-wf-005.1.v1_2.cwl").as_uri() cwl_obj = load_document_by_uri(uri) source_type = cwl_utils.parser.utils.type_for_source( process=cwl_obj, @@ -1172,3 +1192,427 @@ def test_v1_2_type_for_source_with_single_entry_all_non_null() -> None: ) assert isinstance(source_type, cwl_utils.parser.cwl_v1_2.ArraySchema) assert source_type.items == "string" + + +def test_v1_3_file_content_64_kB() -> None: + """Test that reading file content is allowed up to 64kB in CWL v1.3.""" + text = "a" * cwl_utils.parser.cwl_v1_3_utils.CONTENT_LIMIT + with tempfile.TemporaryFile() as f: + f.write(text.encode("utf-8")) + f.seek(0) + content = cwl_utils.parser.cwl_v1_3_utils.content_limit_respected_read(f) + assert content == text + + +def test_v1_3_file_content_larger_than_64_kB() -> None: + """Test that reading file content fails for files larger than 64kB in CWL v1.3.""" + text = "a" * (cwl_utils.parser.cwl_v1_3_utils.CONTENT_LIMIT + 1) + with tempfile.TemporaryFile() as f: + f.write(text.encode("utf-8")) + f.seek(0) + with raises(WorkflowException): + cwl_utils.parser.cwl_v1_3_utils.content_limit_respected_read(f) + + +def test_v_1_3_load_inputfile_with_format() -> None: + """Test that File object with the `format` field is correctly loaded with CWL v1.3.""" + cwl_uri = get_path("testdata/formattest2_v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(cwl_uri) + job_uri 
= get_path("testdata/formattest-job.json").as_uri() + jobfile = cwl_utils.parser.utils.load_inputfile_by_uri( + "v1.3.0-dev1", job_uri, cwl_obj.loadingOptions + ) + assert jobfile["input"].format == cwl_obj.inputs[0].format + + +def test_v_1_3_load_inputfile_with_nested_array() -> None: + """Test that nested arrays are preserved when loading an input file with CWL v1.3.""" + uri = get_path("testdata/nested-array-job.yml").as_uri() + jobfile = cwl_utils.parser.utils.load_inputfile_by_uri("v1.3.0-dev1", uri) + assert isinstance(jobfile["letters"], MutableSequence) + assert isinstance(jobfile["letters"][0], MutableSequence) + assert jobfile["letters"][0][0] == "a" + + +def test_v_1_3_load_inputfile_with_requirements() -> None: + """Test that an input file with the cwl:requirements directive is correctly loaded with CWL v1.3.""" + uri = get_path("testdata/env-job3.yaml").as_uri() + jobfile = cwl_utils.parser.utils.load_inputfile_by_uri("v1.3.0-dev1", uri) + assert isinstance( + jobfile["cwl:requirements"][0], cwl_utils.parser.cwl_v1_3.EnvVarRequirement + ) + + +def test_v_1_3_load_inputfile_with_secondary_files() -> None: + """Test that secondary files are treated as objects when loading an input file with CWL v1.3.""" + uri = get_path("testdata/dir4-job.yml").as_uri() + jobfile = cwl_utils.parser.utils.load_inputfile_by_uri("v1.3.0-dev1", uri) + assert isinstance(jobfile["inf"].secondaryFiles[0], cwl_utils.parser.cwl_v1_3.File) + assert isinstance( + jobfile["inf"].secondaryFiles[1], cwl_utils.parser.cwl_v1_3.Directory + ) + + +def test_v1_3_stdout_to_file() -> None: + """Test that stdout shortcut is converted to stdout parameter with CWL v1.3.""" + clt = cwl_utils.parser.cwl_v1_3.CommandLineTool( + inputs=[], + outputs=[ + cwl_utils.parser.cwl_v1_3.CommandOutputParameter(id="test", type_="stdout") + ], + ) + cwl_utils.parser.cwl_v1_3_utils.convert_stdstreams_to_files(clt) + assert clt.stdout is not None + assert clt.stdout == clt.outputs[0].outputBinding.glob + + 
+def test_v1_3_stdout_to_file_with_binding() -> None: + """Test that outputBinding is not allowed with stdout shortcut with CWL v1.3.""" + clt = cwl_utils.parser.cwl_v1_3.CommandLineTool( + inputs=[], + outputs=[ + cwl_utils.parser.cwl_v1_3.CommandOutputParameter( + id="test", + type_="stdout", + outputBinding=cwl_utils.parser.cwl_v1_3.CommandOutputBinding( + glob="output.txt" + ), + ) + ], + ) + with raises(ValidationException): + cwl_utils.parser.cwl_v1_3_utils.convert_stdstreams_to_files(clt) + + +def test_v1_3_stdout_to_file_preserve_original() -> None: + """Test that stdout parameter prevails on stdout shortcut with CWL v1.3.""" + clt = cwl_utils.parser.cwl_v1_3.CommandLineTool( + inputs=[], + outputs=[ + cwl_utils.parser.cwl_v1_3.CommandOutputParameter(id="test", type_="stdout") + ], + stdout="original.txt", + ) + cwl_utils.parser.cwl_v1_3_utils.convert_stdstreams_to_files(clt) + assert clt.stdout == "original.txt" + assert clt.stdout == clt.outputs[0].outputBinding.glob + + +def test_v1_3_stderr_to_file() -> None: + """Test that stderr shortcut is converted to stderr parameter with CWL v1.3.""" + clt = cwl_utils.parser.cwl_v1_3.CommandLineTool( + inputs=[], + outputs=[ + cwl_utils.parser.cwl_v1_3.CommandOutputParameter(id="test", type_="stderr") + ], + ) + cwl_utils.parser.cwl_v1_3_utils.convert_stdstreams_to_files(clt) + assert clt.stderr is not None + assert clt.stderr == clt.outputs[0].outputBinding.glob + + +def test_v1_3_stderr_to_file_with_binding() -> None: + """Test that outputBinding is not allowed with stderr shortcut with CWL v1.3.""" + clt = cwl_utils.parser.cwl_v1_3.CommandLineTool( + inputs=[], + outputs=[ + cwl_utils.parser.cwl_v1_3.CommandOutputParameter( + id="test", + type_="stderr", + outputBinding=cwl_utils.parser.cwl_v1_3.CommandOutputBinding( + glob="err.txt" + ), + ) + ], + ) + with raises(ValidationException): + cwl_utils.parser.cwl_v1_3_utils.convert_stdstreams_to_files(clt) + + +def test_v1_3_stderr_to_file_preserve_original() -> 
None: + """Test that stderr parameter prevails on stderr shortcut with CWL v1.3.""" + clt = cwl_utils.parser.cwl_v1_3.CommandLineTool( + inputs=[], + outputs=[ + cwl_utils.parser.cwl_v1_3.CommandOutputParameter(id="test", type_="stderr") + ], + stderr="original.txt", + ) + cwl_utils.parser.cwl_v1_3_utils.convert_stdstreams_to_files(clt) + assert clt.stderr == "original.txt" + assert clt.stderr == clt.outputs[0].outputBinding.glob + + +def test_v1_3_stdin_to_file() -> None: + """Test that stdin shortcut is converted to stdin parameter with CWL v1.3.""" + clt = cwl_utils.parser.cwl_v1_3.CommandLineTool( + inputs=[ + cwl_utils.parser.cwl_v1_3.CommandInputParameter(id="test", type_="stdin") + ], + outputs=[], + ) + cwl_utils.parser.cwl_v1_3_utils.convert_stdstreams_to_files(clt) + assert clt.stdin is not None + + +def test_v1_3_stdin_to_file_with_binding() -> None: + """Test that inputBinding is not allowed with stdin shortcut with CWL v1.3.""" + clt = cwl_utils.parser.cwl_v1_3.CommandLineTool( + inputs=[ + cwl_utils.parser.cwl_v1_3.CommandInputParameter( + id="test", + type_="stdin", + inputBinding=cwl_utils.parser.cwl_v1_3.CommandLineBinding( + prefix="--test" + ), + ) + ], + outputs=[], + ) + with raises(ValidationException): + cwl_utils.parser.cwl_v1_3_utils.convert_stdstreams_to_files(clt) + + +def test_v1_3_stdin_to_file_fail_with_original() -> None: + """Test that stdin shortcut fails when stdin parameter is defined with CWL v1.3.""" + clt = cwl_utils.parser.cwl_v1_3.CommandLineTool( + inputs=[ + cwl_utils.parser.cwl_v1_3.CommandInputParameter(id="test", type_="stdin") + ], + outputs=[], + stdin="original.txt", + ) + with raises(ValidationException): + cwl_utils.parser.cwl_v1_3_utils.convert_stdstreams_to_files(clt) + + +def test_v1_3_type_compare_list() -> None: + """Test that the type comparison works correctly a list type with CWL v1.3.""" + uri = get_path("testdata/echo_v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + assert 
cwl_utils.parser.cwl_v1_3_utils._compare_type( + cwl_obj.inputs[0].type_, cwl_obj.inputs[0].type_ + ) + + +def test_v1_3_type_compare_record() -> None: + """Test that the type comparison works correctly a record type with CWL v1.3.""" + uri = get_path("testdata/record-output-wf_v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = cwl_utils.parser.utils.type_for_source( + process=cwl_obj, + sourcenames=cwl_obj.outputs[0].outputSource, + ) + assert cwl_utils.parser.cwl_v1_3_utils._compare_type(source_type, source_type) + + +def test_v1_3_type_for_source() -> None: + """Test that the type is correctly inferred from a source id with CWL v1.3.""" + uri = get_path("testdata/step_valuefrom5_wf_v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = cwl_utils.parser.utils.type_for_source( + cwl_obj, cwl_obj.loadingOptions.fileuri + "#step1/echo_out_file" + ) + assert source_type == "File" + + +def test_v1_3_type_for_source_with_id() -> None: + """Test that the type is correctly inferred from a source id with CWL v1.3.""" + uri = get_path("testdata/step_valuefrom5_wf_with_id_v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = cwl_utils.parser.utils.type_for_source( + cwl_obj, cwl_obj.loadingOptions.fileuri + "#step1/echo_out_file" + ) + assert source_type == "File" + + +def test_v1_3_type_for_stdout() -> None: + """Test that the `stdout` type is correctly matched with the `File` type in CWL v1.3.""" + uri = get_path("testdata/stdout-wf_v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = cwl_utils.parser.utils.type_for_source( + cwl_obj, cwl_obj.outputs[0].outputSource + ) + assert source_type == "File" + + +def test_v1_3_type_output_source_record() -> None: + """Test that the type is correctly inferred from a record output source with CWL v1.3.""" + uri = get_path("testdata/record-output-wf_v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = 
cwl_utils.parser.utils.type_for_source( + process=cwl_obj, + sourcenames=cwl_obj.outputs[0].outputSource, + ) + assert isinstance(source_type, cwl_utils.parser.cwl_v1_3.RecordSchema) + fields = cast( + MutableSequence[cwl_utils.parser.cwl_v1_3.RecordField], source_type.fields + ) + assert len(fields) == 2 + assert fields[0].type_ == "File" + assert fields[1].type_ == "File" + + +def test_v1_3_type_for_output_source_with_single_scatter_step() -> None: + """Test that the type is correctly inferred from a single scatter step with CWL v1.3.""" + uri = get_path("testdata/scatter-wf1_v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = cwl_utils.parser.utils.type_for_source( + process=cwl_obj, + sourcenames=cwl_obj.outputs[0].outputSource, + ) + assert isinstance(source_type, cwl_utils.parser.cwl_v1_3.ArraySchema) + assert source_type.items == "string" + + +def test_v1_3_type_for_output_source_with_nested_crossproduct_scatter_step() -> None: + """Test that the type is correctly inferred from a nested_crossproduct scatter step with CWL v1.3.""" + uri = get_path("testdata/scatter-wf2_v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = cwl_utils.parser.utils.type_for_source( + process=cwl_obj, + sourcenames=cwl_obj.outputs[0].outputSource, + ) + assert isinstance(source_type, cwl_utils.parser.cwl_v1_3.ArraySchema) + assert isinstance(source_type.items, cwl_utils.parser.cwl_v1_3.ArraySchema) + assert source_type.items.items == "string" + + +def test_v1_3_type_for_output_source_with_flat_crossproduct_scatter_step() -> None: + """Test that the type is correctly inferred from a flat_crossproduct scatter step with CWL v1.3.""" + uri = get_path("testdata/scatter-wf3_v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = cwl_utils.parser.utils.type_for_source( + process=cwl_obj, + sourcenames=cwl_obj.outputs[0].outputSource, + ) + assert isinstance(source_type, cwl_utils.parser.cwl_v1_3.ArraySchema) + assert 
source_type.items == "string" + + +def test_v1_3_type_for_source_with_multiple_entries_merge_nested() -> None: + """Test that the type is correctly inferred from a list of source ids and merge_nested with CWL v1.3.""" + uri = get_path("testdata/count-lines6-wf_v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = cwl_utils.parser.utils.type_for_source( + process=cwl_obj, + sourcenames=cwl_obj.steps[0].in_[0].source, + linkMerge=cwl_obj.steps[0].in_[0].linkMerge, + ) + assert isinstance(source_type, cwl_utils.parser.cwl_v1_3.ArraySchema) + assert isinstance(source_type.items, cwl_utils.parser.cwl_v1_3.ArraySchema) + assert source_type.items.items == "File" + + +def test_v1_3_type_for_source_with_multiple_entries_merge_flattened() -> None: + """Test that the type is correctly inferred from a list of source ids and merge_flattened with CWL v1.3.""" # noqa: B950 + uri = get_path("testdata/count-lines7-wf_v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = cwl_utils.parser.utils.type_for_source( + process=cwl_obj, + sourcenames=cwl_obj.steps[0].in_[0].source, + linkMerge=cwl_obj.steps[0].in_[0].linkMerge, + ) + assert isinstance(source_type, cwl_utils.parser.cwl_v1_3.ArraySchema) + assert source_type.items == "File" + + +def test_v1_3_type_for_source_with_single_entry_merge_nested() -> None: + """Test that the type is correctly inferred from a single source id and merge_nested with CWL v1.3.""" + uri = get_path("testdata/count-lines6-single-source-wf_v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = cwl_utils.parser.utils.type_for_source( + process=cwl_obj, + sourcenames=cwl_obj.steps[0].in_[0].source, + linkMerge=cwl_obj.steps[0].in_[0].linkMerge, + ) + assert isinstance(source_type, cwl_utils.parser.cwl_v1_3.ArraySchema) + assert isinstance(source_type.items, cwl_utils.parser.cwl_v1_3.ArraySchema) + assert source_type.items.items == "File" + + +def 
test_v1_3_type_for_source_with_single_entry_merge_flattened() -> None: + """Test that the type is correctly inferred from a single source id and merge_flattened with CWL v1.3.""" + uri = get_path("testdata/count-lines7-single-source-wf_v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = cwl_utils.parser.utils.type_for_source( + process=cwl_obj, + sourcenames=cwl_obj.steps[0].in_[0].source, + linkMerge=cwl_obj.steps[0].in_[0].linkMerge, + ) + assert isinstance(source_type, cwl_utils.parser.cwl_v1_3.ArraySchema) + assert source_type.items == "File" + + +def test_v1_3_type_for_source_with_multiple_entries_first_non_null() -> None: + """Test that the type is correctly inferred from a list of source ids and first_non_null with CWL v1.3.""" + uri = get_path("testdata/cond-wf-003.1.v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = cwl_utils.parser.utils.type_for_source( + process=cwl_obj, + sourcenames=cwl_obj.outputs[0].outputSource, + pickValue=cwl_obj.outputs[0].pickValue, + ) + assert source_type == "string" + + +def test_v1_3_type_for_source_with_multiple_entries_the_only_non_null() -> None: + """Test that the type is correctly inferred from a list of source ids and the_only_non_null with CWL v1.3.""" # noqa: B950 + uri = get_path("testdata/cond-wf-004.1.v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = cwl_utils.parser.utils.type_for_source( + process=cwl_obj, + sourcenames=cwl_obj.outputs[0].outputSource, + pickValue=cwl_obj.outputs[0].pickValue, + ) + assert source_type == "string" + + +def test_v1_3_type_for_source_with_multiple_entries_all_non_null() -> None: + """Test that the type is correctly inferred from a list of source ids and all_non_null with CWL v1.3.""" + uri = get_path("testdata/cond-wf-005.1.v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = cwl_utils.parser.utils.type_for_source( + process=cwl_obj, + sourcenames=cwl_obj.outputs[0].outputSource, + 
pickValue=cwl_obj.outputs[0].pickValue, + ) + assert isinstance(source_type, cwl_utils.parser.cwl_v1_3.ArraySchema) + assert source_type.items == "string" + + +def test_v1_3_type_for_source_with_single_entry_first_non_null() -> None: + """Test that the type is correctly inferred from a single source id and first_non_null with CWL v1.3.""" + uri = get_path("testdata/cond-single-source-wf-003.1.v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = cwl_utils.parser.utils.type_for_source( + process=cwl_obj, + sourcenames=cwl_obj.outputs[0].outputSource, + pickValue=cwl_obj.outputs[0].pickValue, + ) + assert source_type == "string" + + +def test_v1_3_type_for_source_with_single_entry_the_only_non_null() -> None: + """Test that the type is correctly inferred from a single source id and the_only_non_null with CWL v1.3.""" # noqa: B950 + uri = get_path("testdata/cond-single-source-wf-004.1.v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = cwl_utils.parser.utils.type_for_source( + process=cwl_obj, + sourcenames=cwl_obj.outputs[0].outputSource, + pickValue=cwl_obj.outputs[0].pickValue, + ) + assert source_type == "string" + + +def test_v1_3_type_for_source_with_single_entry_all_non_null() -> None: + """Test that the type is correctly inferred from a single source id and all_non_null with CWL v1.3.""" + uri = get_path("testdata/cond-single-source-wf-005.1.v1_3.cwl").as_uri() + cwl_obj = load_document_by_uri(uri) + source_type = cwl_utils.parser.utils.type_for_source( + process=cwl_obj, + sourcenames=cwl_obj.outputs[0].outputSource, + pickValue=cwl_obj.outputs[0].pickValue, + ) + assert isinstance(source_type, cwl_utils.parser.cwl_v1_3.ArraySchema) + assert source_type.items == "string"